Package org.apache.hadoop.hive.ql.plan

Examples of org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc
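The snippets below come from Hive's query planner and its tests. The recurring pattern is the four-argument constructor, ExprNodeColumnDesc(TypeInfo, internalName, tabAlias, isVirtualCol), which builds a plan-tree reference to a single input column. A minimal, self-contained sketch of just that constructor (the class wrapper and the column/alias names "key" and "src" are illustrative, not taken from the snippets):

    import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

    public class ExprNodeColumnDescExample {
      public static void main(String[] args) {
        // Reference a string column named "key" from table alias "src";
        // the final argument marks whether the column is virtual.
        ExprNodeDesc keyCol = new ExprNodeColumnDesc(
            TypeInfoFactory.stringTypeInfo, // column type
            "key",                          // internal column name
            "src",                          // table alias
            false);                         // isVirtualCol
        System.out.println(keyCol.getExprString());
      }
    }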


    ArrayList<ExprNodeDesc> expressions = new ArrayList<ExprNodeDesc>(columnNumber);
    for (Integer posn : posns) {
      ObjectInspector tableFieldOI = tableFields.get(posn).getFieldObjectInspector();
      TypeInfo tableFieldTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(tableFieldOI);
      TypeInfo rowFieldTypeInfo = rowFields.get(posn).getType();
      ExprNodeDesc column = new ExprNodeColumnDesc(rowFieldTypeInfo,
          rowFields.get(posn).getInternalName(), rowFields.get(posn).getTabAlias(),
          rowFields.get(posn).getIsVirtualCol());

      if (convert && !tableFieldTypeInfo.equals(rowFieldTypeInfo)) {
        // need to do some conversions here
        // ... (excerpt truncated)
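The conversion branch is elided above. A minimal sketch of what such a conversion can look like, assuming the TypeCheckProcFactory helper that appears in the filter example further down this page (this wrapping is an assumption about the elided code, not a quotation of it):

      // Wrap the column in a conversion UDF named after the target type
      // (e.g. "double"), so the row field is cast to the table field's type.
      column = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc(
          tableFieldTypeInfo.getTypeName(), column);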


    // For the generation of the values expression just get the inputs
    // signature and generate field expressions for those
    Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
    ArrayList<ExprNodeDesc> valueCols = new ArrayList<ExprNodeDesc>();
    for (ColumnInfo colInfo : inputRR.getColumnInfos()) {
      valueCols.add(new ExprNodeColumnDesc(colInfo.getType(), colInfo
          .getInternalName(), colInfo.getTabAlias(), colInfo
          .getIsVirtualCol()));
      colExprMap.put(colInfo.getInternalName(), valueCols
          .get(valueCols.size() - 1));
    }

    ArrayList<String> outputColumns = new ArrayList<String>();
    for (int i = 0; i < valueCols.size(); i++) {
      outputColumns.add(getColumnInternalName(i));
    }

    StringBuilder order = new StringBuilder();
    for (int sortOrder : sortOrders) {
      order.append(sortOrder == BaseSemanticAnalyzer.HIVE_COLUMN_ORDER_ASC ? '+' : '-');
    }

    Operator interim = putOpInsertMap(OperatorFactory.getAndMakeChild(PlanUtils
        .getReduceSinkDesc(sortCols, valueCols, outputColumns, false, -1,
            partitionCols, order.toString(), numReducers),
        new RowSchema(inputRR.getColumnInfos()), input), inputRR);
    interim.setColumnExprMap(colExprMap);
    reduceSinkOperatorsAddedByEnforceBucketingSorting.add((ReduceSinkOperator) interim);

    // Add the extract operator to get the value fields
    RowResolver out_rwsch = new RowResolver();
    RowResolver interim_rwsch = inputRR;
    Integer pos = Integer.valueOf(0);
    for (ColumnInfo colInfo : interim_rwsch.getColumnInfos()) {
      String[] info = interim_rwsch.reverseLookup(colInfo.getInternalName());
      out_rwsch.put(info[0], info[1], new ColumnInfo(
          getColumnInternalName(pos), colInfo.getType(), info[0],
          colInfo.getIsVirtualCol(), colInfo.isHiddenVirtualCol()));
      pos = Integer.valueOf(pos.intValue() + 1);
    }

    Operator output = putOpInsertMap(OperatorFactory.getAndMakeChild(
        new ExtractDesc(new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,
            Utilities.ReduceField.VALUE.toString(), "", false)), new RowSchema(
            out_rwsch.getColumnInfos()), interim), out_rwsch);

    if (LOG.isDebugEnabled()) {
      LOG.debug("Created ReduceSink Plan for table: " + tab.getTableName() +
      // ... (excerpt truncated)
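This example and the near-duplicate that follows both name their output columns via getColumnInternalName, which in Hive delegates to HiveConf.getColumnInternalName and produces purely positional names ("_col0", "_col1", ...). A small sketch of that convention:

    import org.apache.hadoop.hive.conf.HiveConf;

    // Prints "_col0", "_col1", "_col2": internal names encode only position.
    for (int i = 0; i < 3; i++) {
      System.out.println(HiveConf.getColumnInternalName(i));
    }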

    // For the generation of the values expression just get the inputs
    // signature and generate field expressions for those
    Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
    ArrayList<ExprNodeDesc> valueCols = new ArrayList<ExprNodeDesc>();
    for (ColumnInfo colInfo : inputRR.getColumnInfos()) {
      valueCols.add(new ExprNodeColumnDesc(colInfo.getType(), colInfo
          .getInternalName(), colInfo.getTabAlias(), colInfo
          .getIsVirtualCol()));
      colExprMap.put(colInfo.getInternalName(), valueCols
          .get(valueCols.size() - 1));
    }

    ArrayList<String> outputColumns = new ArrayList<String>();
    for (int i = 0; i < valueCols.size(); i++) {
      outputColumns.add(getColumnInternalName(i));
    }
    Operator interim = putOpInsertMap(OperatorFactory.getAndMakeChild(PlanUtils
        .getReduceSinkDesc(sortCols, valueCols, outputColumns, false, -1,
            partitionCols, order.toString(), numReducers),
        new RowSchema(inputRR.getColumnInfos()), input), inputRR);
    interim.setColumnExprMap(colExprMap);

    // Add the extract operator to get the value fields
    RowResolver out_rwsch = new RowResolver();
    RowResolver interim_rwsch = inputRR;
    Integer pos = Integer.valueOf(0);
    for (ColumnInfo colInfo : interim_rwsch.getColumnInfos()) {
      String[] info = interim_rwsch.reverseLookup(colInfo.getInternalName());
      out_rwsch.put(info[0], info[1], new ColumnInfo(
          getColumnInternalName(pos), colInfo.getType(), info[0],
          colInfo.getIsVirtualCol(), colInfo.isHiddenVirtualCol()));
      pos = Integer.valueOf(pos.intValue() + 1);
    }

    Operator output = putOpInsertMap(OperatorFactory.getAndMakeChild(
        new ExtractDesc(new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,
            Utilities.ReduceField.VALUE.toString(), "", false)), new RowSchema(
            out_rwsch.getColumnInfos()), interim), out_rwsch);

    if (LOG.isDebugEnabled()) {
      LOG.debug("Created ReduceSink Plan for clause: " + dest + " row schema: "
      // ... (excerpt truncated)

          HashMap<String, ColumnInfo> map = inputRS.getFieldMap(key);
          Iterator<String> fNamesIter = map.keySet().iterator();
          while (fNamesIter.hasNext()) {
            String field = fNamesIter.next();
            ColumnInfo valueInfo = inputRS.get(key, field);
            keyDesc.add(new ExprNodeColumnDesc(valueInfo.getType(), valueInfo
                .getInternalName(), valueInfo.getTabAlias(), valueInfo
                .getIsVirtualCol()));

            if (outputRS.get(key, field) == null) {
              String colName = getColumnInternalName(outputPos);
              // ... (excerpt truncated)

    final String F1 = "#affiliations";
    final String F2 = "friends[0].friendid";

    try {
      // initialize a complete map reduce configuration
      ExprNodeDesc expr1 = new ExprNodeColumnDesc(
          TypeInfoFactory.stringTypeInfo, F1, "", false);
      ExprNodeDesc expr2 = new ExprNodeColumnDesc(
          TypeInfoFactory.stringTypeInfo, F2, "", false);
      ExprNodeDesc filterExpr = TypeCheckProcFactory.DefaultExprProcessor
          .getFuncExprNodeDesc("==", expr1, expr2);

      FilterDesc filterCtx = new FilterDesc(filterExpr, false);
      // ... (excerpt truncated)

    }
  }

  private FilterDesc getTestFilterDesc(String column) {
    ArrayList<ExprNodeDesc> children1 = new ArrayList<ExprNodeDesc>();
    children1.add(new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,
        column, "", false));
    ExprNodeDesc lhs = new ExprNodeGenericFuncDesc(
        TypeInfoFactory.doubleTypeInfo, FunctionRegistry.getFunctionInfo(
        serdeConstants.DOUBLE_TYPE_NAME).getGenericUDF(), children1);
    // ... (excerpt truncated)

    Operator<FileSinkDesc> op4 = OperatorFactory.get(new FileSinkDesc(tmpdir
        + "mapredplan3.out", Utilities.defaultTd, false));

    Operator<SelectDesc> op5 = OperatorFactory.get(new SelectDesc(Utilities
        .makeList(new ExprNodeFieldDesc(TypeInfoFactory.stringTypeInfo,
        new ExprNodeColumnDesc(TypeInfoFactory.getListTypeInfo(TypeInfoFactory.stringTypeInfo),
        Utilities.ReduceField.VALUE.toString(), "", false), "0", false)),
        Utilities.makeList(outputColumns.get(0))), op4);

    rWork.setReducer(op5);
  }

    rWork.setReducer(op2);
  }

  public static ExprNodeColumnDesc getStringColumn(String columnName) {
    return new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, columnName,
        "", false);
  }
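The helper above pairs naturally with the getFuncExprNodeDesc call shown in the earlier filter example. A hypothetical usage that builds an equality predicate over two string columns (the column names are illustrative):

      // Compare two string columns and wrap the predicate in a FilterDesc,
      // mirroring the F1/F2 example earlier on this page.
      ExprNodeDesc predicate = TypeCheckProcFactory.DefaultExprProcessor
          .getFuncExprNodeDesc("==", getStringColumn("col1"), getStringColumn("col2"));
      FilterDesc filter = new FilterDesc(predicate, false);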

    if (expression.getChildren() == null) {
      return false;
    }

    if (expression.getChildren().size() == 2) {
      ExprNodeColumnDesc columnDesc = null;
      if (expression.getChildren().get(0) instanceof ExprNodeColumnDesc) {
        columnDesc = (ExprNodeColumnDesc)expression.getChildren().get(0);
      } else if (expression.getChildren().get(1) instanceof ExprNodeColumnDesc) {
        columnDesc = (ExprNodeColumnDesc)expression.getChildren().get(1);
      }

      // Is this the target
      if (columnDesc != null && !partitionCols.contains(columnDesc.getColumn())) {
        assert expression instanceof ExprNodeGenericFuncDesc :
               "Expression containing index column is does not support sorting, should not try" +
               "and sort";
        ((ExprNodeGenericFuncDesc)expression).setSortedExpr(true);
        return true;
        // ... (excerpt truncated)

            continue;
          }
          String outputCol = oldValueInfo.getInternalName();
          if (outputRS.get(key, field) == null) {
            outputColumnNames.add(outputCol);
            ExprNodeDesc colDesc = new ExprNodeColumnDesc(valueInfo.getType(), valueInfo
                .getInternalName(), valueInfo.getTabAlias(), valueInfo.getIsVirtualCol());
            values.add(colDesc);
            outputRS.put(key, field, new ColumnInfo(outputCol, valueInfo.getType(), valueInfo
                .getTabAlias(), valueInfo.getIsVirtualCol(), valueInfo.isHiddenVirtualCol()));
            colExprMap.put(outputCol, colDesc);
          }
        }
      }

      valueExprMap.put(Byte.valueOf((byte) pos), values);
    }

    Map<Byte, List<ExprNodeDesc>> filterMap = desc.getFilters();
    for (Map.Entry<Byte, List<ExprNodeDesc>> entry : filterMap.entrySet()) {
      Byte srcAlias = entry.getKey();
      List<ExprNodeDesc> columnDescList = entry.getValue();

      for (ExprNodeDesc nodeExpr : columnDescList) {
        ExprNodeGenericFuncDesc funcDesc = (ExprNodeGenericFuncDesc) nodeExpr;
        for (ExprNodeDesc childDesc : funcDesc.getChildExprs()) {
          if (!(childDesc instanceof ExprNodeColumnDesc)) {
            continue;
          }
          ExprNodeColumnDesc columnDesc = (ExprNodeColumnDesc) childDesc;
          // reset columns
          String column = columnDesc.getColumn();
          String newColumn = null;
          HashMap<String, ExprNodeDesc> map = columnTransfer.get(srcAlias);
          ExprNodeColumnDesc tmpDesc = (ExprNodeColumnDesc) map.get(column);
          if (tmpDesc != null) {
            newColumn = tmpDesc.getColumn();
          }
          if (newColumn == null) {
            throw new SemanticException("No Column name found in parent reduce sink op");
          }
          columnDesc.setColumn(newColumn);
        }
      }
    }

    JoinCondDesc[] joinCondns = op.getConf().getConds();

    Operator[] newPar = new Operator[newParentOps.size()];
    pos = 0;
    for (Operator<? extends Serializable> o : newParentOps) {
      newPar[pos++] = o;
    }

    List<ExprNodeDesc> keyCols = keyExprMap.get(Byte.valueOf((byte) 0));
    StringBuilder keyOrder = new StringBuilder();
    for (int i = 0; i < keyCols.size(); i++) {
      keyOrder.append("+");
    }

    TableDesc keyTableDesc = PlanUtils.getMapJoinKeyTableDesc(PlanUtils
        .getFieldSchemasFromColumnList(keyCols, "mapjoinkey"));

    List<TableDesc> valueTableDescs = new ArrayList<TableDesc>();
    List<TableDesc> valueFilteredTableDescs = new ArrayList<TableDesc>();

    for (pos = 0; pos < newParentOps.size(); pos++) {
      List<ExprNodeDesc> valueCols = valueExprMap.get(Byte.valueOf((byte) pos));
      int length = valueCols.size();
      List<ExprNodeDesc> valueFilteredCols = new ArrayList<ExprNodeDesc>(length);
      // deep copy expr node desc
      for (int i = 0; i < length; i++) {
        valueFilteredCols.add(valueCols.get(i).clone());
      }
      List<ExprNodeDesc> valueFilters = filterMap.get(Byte.valueOf((byte) pos));

      if (valueFilters != null && valueFilters.size() != 0 && pos != mapJoinPos) {
        ExprNodeColumnDesc isFilterDesc = new ExprNodeColumnDesc(TypeInfoFactory
            .getPrimitiveTypeInfo(Constants.BOOLEAN_TYPE_NAME), "filter", "filter", false);
        valueFilteredCols.add(isFilterDesc);
      }

      // ... (excerpt truncated)
