Package org.apache.hadoop.hive.ql.plan

Examples of org.apache.hadoop.hive.ql.plan.CollectDesc
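
CollectDesc is the plan descriptor for Hive's CollectOperator (org.apache.hadoop.hive.ql.exec.CollectOperator), a small operator used mainly in unit tests: it buffers the rows its parent forwards so the test can read them back and check them. Its constructor takes the buffer size as an Integer. The snippets on this page are truncated excerpts from Hive's test code. The sketch below only illustrates the pattern they share; parentOp stands in for whichever operator is under test (an assumption, not from the excerpts), and the retrieve() call shows how CollectOperator's buffered rows are typically read back, although it does not appear in the excerpts themselves.

    import org.apache.hadoop.hive.ql.exec.CollectOperator;
    import org.apache.hadoop.hive.ql.exec.OperatorFactory;
    import org.apache.hadoop.hive.ql.plan.CollectDesc;
    import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;

    // CollectDesc only carries the buffer size for the CollectOperator.
    CollectDesc cd = new CollectDesc(Integer.valueOf(10));

    // Pattern 1: attach the collector directly under an existing operator
    // (parentOp is an assumed, already-built operator under test).
    CollectOperator cdop =
        (CollectOperator) OperatorFactory.getAndMakeChild(cd, parentOp);

    // Pattern 2: create it standalone and set the descriptor explicitly,
    // as the aliasToWork example below does.
    CollectOperator cdop2 =
        (CollectOperator) OperatorFactory.get(CollectDesc.class);
    cdop2.setConf(cd);

    // After rows have been pushed through the parent, the tests pull them
    // back out one at a time; io.o is the row object, io.oi its ObjectInspector.
    InspectableObject io = new InspectableObject();
    cdop.retrieve(io);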


                                     TextRecordReader.class, TextRecordReader.class,
                                     PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key"));
      Operator<ScriptDesc> sop = OperatorFactory.getAndMakeChild(sd, op);

      // Collect operator to observe the output of the script
      CollectDesc cd = new CollectDesc(Integer.valueOf(10));
      CollectOperator cdop = (CollectOperator) OperatorFactory.getAndMakeChild(
          cd, sop);

      op.initialize(new JobConf(TestOperators.class),
          new ObjectInspector[] {r[0].oi});
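
The excerpt above is from Hive's TestOperators: a ScriptDesc/ScriptOperator (sop) is built on top of an existing operator op, and a CollectOperator with a 10-row buffer is attached as the script operator's child so the test can observe the rows the script emits once op has been initialized with the test ObjectInspectors.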


      LinkedHashMap<String, org.apache.hadoop.hive.ql.plan.PartitionDesc> pathToPartitionInfo =
        new LinkedHashMap<String, org.apache.hadoop.hive.ql.plan.PartitionDesc>();
      pathToPartitionInfo.put("hdfs:///testDir", pd);

      // initialize aliasToWork
      CollectDesc cd = new CollectDesc(Integer.valueOf(1));
      CollectOperator cdop1 = (CollectOperator) OperatorFactory
          .get(CollectDesc.class);
      cdop1.setConf(cd);
      CollectOperator cdop2 = (CollectOperator) OperatorFactory
          .get(CollectDesc.class);
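
This second excerpt shows the standalone construction style used when wiring up a map-side test plan: pathToPartitionInfo maps a test directory to a PartitionDesc, and CollectOperators obtained from OperatorFactory.get(CollectDesc.class) are given a one-row CollectDesc through setConf before being registered as the aliasToWork operators.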


    List<ExprNodeDesc> expressionList = getExpressionList();
    SelectDesc selectCtx = new SelectDesc(expressionList,
        OperatorTestUtils.createOutputColumnNames(expressionList));
    Operator<SelectDesc> op = OperatorFactory.get(SelectDesc.class);
    op.setConf(selectCtx);
    CollectDesc cd = new CollectDesc(Integer.valueOf(10));
    CollectOperator cdop = (CollectOperator) OperatorFactory.getAndMakeChild(cd, op);
    op.initialize(new JobConf(OperatorTestUtils.class), new ObjectInspector[] {data[0].oi});
    OperatorTestUtils.assertResults(op, cdop, data, getExpectedResult());
  }
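
The final excerpt applies the same pattern to a SelectOperator: a SelectDesc built from an expression list is set on op, a CollectOperator with a 10-row buffer is attached beneath it, and OperatorTestUtils.assertResults checks the rows that cdop collects against the expected results.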

