Package org.apache.hadoop.hive.ql.plan

Examples of org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc
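ExprNodeConstantDesc is the expression-tree node that Hive's query planner uses to represent a literal value. It can be built with an explicit TypeInfo, or with the type inferred from the Java value. As a minimal sketch of those two constructors (assembled for this page, not taken from the excerpts below):

    import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

    public class ConstantDescSketch {
      public static void main(String[] args) {
        // Type inferred from the Java value: a string constant "0".
        ExprNodeConstantDesc untyped = new ExprNodeConstantDesc("0");

        // Type supplied explicitly; the value may even be null, as in the
        // CAST(NULL AS STRING) rewrite shown in a later excerpt.
        ExprNodeConstantDesc typed = new ExprNodeConstantDesc(
            TypeInfoFactory.longTypeInfo, Long.valueOf(100));

        System.out.println(untyped.getExprString()); // the quoted literal '0'
        System.out.println(typed.getExprString());   // 100
      }
    }

The first excerpt wraps constant operands in cast-to-double function expressions: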


    // children1 is built earlier in the full source (analogously to children2
    // below) and holds the operand being cast to double.
    ExprNodeDesc lhs = new ExprNodeGenericFuncDesc(
        TypeInfoFactory.doubleTypeInfo, FunctionRegistry.getFunctionInfo(
        serdeConstants.DOUBLE_TYPE_NAME).getGenericUDF(), children1);

    ArrayList<ExprNodeDesc> children2 = new ArrayList<ExprNodeDesc>();
    children2.add(new ExprNodeConstantDesc(TypeInfoFactory.longTypeInfo, Long
        .valueOf(100)));
    ExprNodeDesc rhs = new ExprNodeGenericFuncDesc(
        TypeInfoFactory.doubleTypeInfo, FunctionRegistry.getFunctionInfo(
        serdeConstants.DOUBLE_TYPE_NAME).getGenericUDF(), children2);
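From a filter-operator test: string columns are compared with each other and with the constant "0" to build a filter predicate: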


    try {
      System.out.println("Testing Filter Operator");
      ExprNodeDesc col0 = TestExecDriver.getStringColumn("col0");
      ExprNodeDesc col1 = TestExecDriver.getStringColumn("col1");
      ExprNodeDesc col2 = TestExecDriver.getStringColumn("col2");
      ExprNodeDesc zero = new ExprNodeConstantDesc("0");
      ExprNodeDesc func1 = TypeCheckProcFactory.DefaultExprProcessor
          .getFuncExprNodeDesc(">", col2, col1);
      ExprNodeDesc func2 = TypeCheckProcFactory.DefaultExprProcessor
          .getFuncExprNodeDesc("==", col0, zero);
      // The excerpt is cut off mid-statement here; combining the two
      // comparisons with "&&" follows the pattern of the calls above.
      ExprNodeDesc func3 = TypeCheckProcFactory.DefaultExprProcessor
          .getFuncExprNodeDesc("&&", func1, func2);
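From a select-operator test: the constant "1" is concatenated onto a column to form the second projected expression: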

      // output column 1: col1, passed through unchanged
      ExprNodeDesc exprDesc1 = TestExecDriver.getStringColumn("col1");

      // output column 2: concat(col0, "1")
      ExprNodeDesc expr1 = TestExecDriver.getStringColumn("col0");
      ExprNodeDesc expr2 = new ExprNodeConstantDesc("1");
      ExprNodeDesc exprDesc2 = TypeCheckProcFactory.DefaultExprProcessor
          .getFuncExprNodeDesc("concat", expr1, expr2);

      // select operator to project these two columns
      ArrayList<ExprNodeDesc> earr = new ArrayList<ExprNodeDesc>();
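Each grouping set ID is wrapped in a constant expression and evaluated once up front (a constant needs no input row, so evaluate(null) suffices):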

      groupingSetsBitSet = new ArrayList<FastBitSet>();

      for (Integer groupingSet: groupingSets) {
        // Create the mapping corresponding to the grouping set
        ExprNodeEvaluator groupingSetValueEvaluator =
          ExprNodeEvaluatorFactory.get(new ExprNodeConstantDesc(String.valueOf(groupingSet)));

        newKeysGroupingSets.add(groupingSetValueEvaluator.evaluate(null));
        groupingSetsBitSet.add(groupingSet2BitSet(groupingSet));
      }
    }
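When the select list is materialized, untyped NULL literals (ExprNodeNullDesc) are replaced with typed null constants, i.e. CAST(NULL AS STRING):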

    ArrayList<String> columnNames = new ArrayList<String>();
    Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
    for (int i = 0; i < col_list.size(); i++) {
      // Replace NULL with CAST(NULL AS STRING)
      if (col_list.get(i) instanceof ExprNodeNullDesc) {
        col_list.set(i, new ExprNodeConstantDesc(
            TypeInfoFactory.stringTypeInfo, null));
      }
      String outputCol = getColumnInternalName(i);
      colExprMap.put(outputCol, col_list.get(i));
      columnNames.add(outputCol);
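When grouping sets are present, a placeholder constant is appended as an extra grouping key for the GROUPING__ID virtual column: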

    // This function is called for GroupBy1 to create an additional grouping key
    // for the grouping set (corresponding to the rollup).
    if (groupingSetsPresent) {
      // The constant's value does not matter; it is replaced by the actual
      // grouping set value at execution time.
      ExprNodeConstantDesc constant = new ExprNodeConstantDesc("0");
      groupByKeys.add(constant);
      String field = getColumnInternalName(groupByKeys.size() - 1);
      outputColumnNames.add(field);
      groupByOutputRowResolver.put(null, VirtualColumn.GROUPINGID.getName(),
        new ColumnInfo(
            // The excerpt is cut off inside this constructor call; these
            // arguments are reconstructed from context.
            field, TypeInfoFactory.stringTypeInfo, null, true));
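genSamplePredicate builds the TABLESAMPLE filter expression from integer constants for the bucket numerator, the denominator, and Integer.MAX_VALUE: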

  private ExprNodeDesc genSamplePredicate(TableSample ts,
      List<String> bucketCols, boolean useBucketCols, String alias,
      RowResolver rwsch, QBMetaData qbm, ExprNodeDesc planExpr)
      throws SemanticException {

    ExprNodeDesc numeratorExpr = new ExprNodeConstantDesc(
        TypeInfoFactory.intTypeInfo, Integer.valueOf(ts.getNumerator() - 1));

    ExprNodeDesc denominatorExpr = new ExprNodeConstantDesc(
        TypeInfoFactory.intTypeInfo, Integer.valueOf(ts.getDenominator()));

    ExprNodeDesc intMaxExpr = new ExprNodeConstantDesc(
        TypeInfoFactory.intTypeInfo, Integer.valueOf(Integer.MAX_VALUE));

    ArrayList<ExprNodeDesc> args = new ArrayList<ExprNodeDesc>();
    if (planExpr != null) {
      args.add(planExpr);
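When input pruning cannot be used, a rand() expression seeded with a constant integer is fed into the sample predicate and a filter operator: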

            TableSample tsSample = new TableSample(1, freq);
            tsSample.setInputPruning(false);
            qb.getParseInfo().setTabSample(alias, tsSample);
            LOG.info("Need sample filter");
            ExprNodeDesc randFunc = TypeCheckProcFactory.DefaultExprProcessor
                .getFuncExprNodeDesc("rand", new ExprNodeConstantDesc(Integer
                .valueOf(460476415)));
            ExprNodeDesc samplePred = genSamplePredicate(tsSample, null, false,
                alias, rwsch, qb.getMetaData(), randFunc);
            tableOp = OperatorFactory.getAndMakeChild(new FilterDesc(
                samplePred, true),
                // ... (remaining arguments elided in the excerpt)
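Numeric literals are parsed and validated before being wrapped in a constant descriptor: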

      }
      // v holds the numeric value parsed above (the parsing logic is elided
      // in the excerpt).
      if (v == null) {
        throw new SemanticException(ErrorMsg.INVALID_NUMERICAL_CONSTANT
            .getMsg(expr));
      }
      return new ExprNodeConstantDesc(v);
    }
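Identifiers (and the keywords IF, LEFT, and RIGHT) are unescaped and returned as string constants: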

        // HiveParser.identifier | HiveParser.KW_IF | HiveParser.KW_LEFT |
        // HiveParser.KW_RIGHT
        str = BaseSemanticAnalyzer.unescapeIdentifier(expr.getText());
        break;
      }
      return new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, str);
    }
