Package org.apache.hcatalog.data.schema

Examples of org.apache.hcatalog.data.schema.HCatSchema


    case DataType.BAG:
      Schema bagSchema = fSchema.schema;
      List<HCatFieldSchema> arrFields = new ArrayList<HCatFieldSchema>(1);
      arrFields.add(getHCatFSFromPigFS(bagSchema.getField(0), hcatTblSchema));
      return new HCatFieldSchema(fSchema.alias, Type.ARRAY, new HCatSchema(arrFields), "");

    case DataType.TUPLE:
      List<String> fieldNames = new ArrayList<String>();
      List<HCatFieldSchema> hcatFSs = new ArrayList<HCatFieldSchema>();
      for( FieldSchema fieldSchema : fSchema.schema.getFields()){
        fieldNames.add( fieldSchema.alias);
        hcatFSs.add(getHCatFSFromPigFS(fieldSchema, hcatTblSchema));
      }
      return new HCatFieldSchema(fSchema.alias, Type.STRUCT, new HCatSchema(hcatFSs), "");

    case DataType.MAP:{
      // Pig's schema contain no type information about map's keys and
      // values. So, if its a new column assume <string,string> if its existing
      // return whatever is contained in the existing column.
      HCatFieldSchema mapField = getTableCol(fSchema.alias, hcatTblSchema);
      HCatFieldSchema valFS;
      List<HCatFieldSchema> valFSList = new ArrayList<HCatFieldSchema>(1);

      if(mapField != null){
        Type mapValType = mapField.getMapValueSchema().get(0).getType();

        switch(mapValType){
        case STRING:
        case BIGINT:
        case INT:
        case FLOAT:
        case DOUBLE:
          valFS = new HCatFieldSchema(fSchema.alias, mapValType, null);
          break;
        default:
          throw new FrontendException("Only pig primitive types are supported as map value types.", PigHCatUtil.PIG_EXCEPTION_CODE);
        }
        valFSList.add(valFS);
        return new HCatFieldSchema(fSchema.alias,Type.MAP,Type.STRING, new HCatSchema(valFSList),"");
      }

      // Column not found in target table. Its a new column. Its schema is map<string,string>
      valFS = new HCatFieldSchema(fSchema.alias, Type.STRING, "");
      valFSList.add(valFS);
      return new HCatFieldSchema(fSchema.alias,Type.MAP,Type.STRING, new HCatSchema(valFSList),"");
     }

    default:
      throw new FrontendException("Unsupported type: "+type+"  in Pig's schema", PigHCatUtil.PIG_EXCEPTION_CODE);
    }
View Full Code Here


    }
    LOG.debug("getSchema got schema :" + tableSchema.toString());
    List<HCatFieldSchema> colsPlusPartKeys = new ArrayList<HCatFieldSchema>();
    colsPlusPartKeys.addAll(tableSchema.getFields());
    colsPlusPartKeys.addAll(partitionSchema.getFields());
    outputSchema = new HCatSchema(colsPlusPartKeys);
    return PigHCatUtil.getResourceSchema(outputSchema);
  }
View Full Code Here

    tableSchema = rv.get(0);
    partitionSchema = rv.get(1);
    List<HCatFieldSchema> colsPlusPartKeys = new ArrayList<HCatFieldSchema>();
    colsPlusPartKeys.addAll(tableSchema.getFields());
    colsPlusPartKeys.addAll(partitionSchema.getFields());
    outputSchema = new HCatSchema(colsPlusPartKeys);
    UDFContext udfContext = UDFContext.getUDFContext();
    Properties props = udfContext.getUDFProperties(this.getClass(),
          new String[] {signature});
    RequiredFieldList requiredFieldsInfo =
          (RequiredFieldList) props.get(PRUNE_PROJECTION_INFO);
    if (requiredFieldsInfo != null) {
      ArrayList<HCatFieldSchema> fcols = new ArrayList<HCatFieldSchema>();
      for (RequiredField rf : requiredFieldsInfo.getFields()) {
        fcols.add(tableSchema.getFields().get(rf.getIndex()));
      }
      outputSchema = new HCatSchema(fcols);
      try {
        HCatBaseInputFormat.setOutputSchema(job, outputSchema);
      } catch (Exception e) {
        throw new IOException(e);
      }
View Full Code Here

            + "current_grades:map<string,string>,"
            + "phnos:array<struct<phno:string,type:string>>,blah:array<int>>";

        TypeInfo ti = TypeInfoUtils.getTypeInfoFromTypeString(typeString);

        HCatSchema hsch = HCatSchemaUtils.getHCatSchemaFromTypeString(typeString);
        System.out.println(ti.getTypeName());
        System.out.println(hsch.toString());
        assertEquals(ti.getTypeName(),hsch.toString());
        assertEquals(hsch.toString(),typeString);
    }
View Full Code Here

      return null;
    }

    Properties props = UDFContext.getUDFContext().getUDFProperties(
        classForUDFCLookup, new String[] {signature});
    HCatSchema hcatTableSchema = (HCatSchema) props.get(HCatConstants.HCAT_TABLE_SCHEMA);

    ArrayList<HCatFieldSchema> fcols = new ArrayList<HCatFieldSchema>();
    for(RequiredField rf: fields) {
      fcols.add(hcatTableSchema.getFields().get(rf.getIndex()));
    }
    return new HCatSchema(fcols);
  }
View Full Code Here

  }

  public void testConvertValueToTuple() throws IOException,InterruptedException{
    BytesRefArrayWritable[] bytesArr = initTestEnvironment();

    HCatSchema schema = buildHiveSchema();
    RCFileInputDriver sd = new RCFileInputDriver();
    JobContext jc = new JobContext(conf, new JobID());
    sd.setInputPath(jc, file.toString());
    InputFormat<?,?> iF = sd.getInputFormat(null);
    InputSplit split = iF.getSplits(jc).get(0);
View Full Code Here

    return  new HCatRecord[]{tup_1,tup_2};
  }

  private HCatSchema buildHiveSchema() throws HCatException{
    return new HCatSchema(HCatUtil.getHCatFieldSchemaList(new FieldSchema("atinyint", "tinyint", ""),
                                                          new FieldSchema("asmallint", "smallint", ""),
                                                          new FieldSchema("aint", "int", ""),
                                                          new FieldSchema("along", "bigint", ""),
                                                          new FieldSchema("adouble", "double", ""),
                                                          new FieldSchema("astring", "string", ""),
View Full Code Here

                                                          new FieldSchema("anullint", "int", ""),
                                                          new FieldSchema("anullstring", "string", "")));
  }

  private HCatSchema buildPrunedSchema() throws HCatException{
    return new HCatSchema(HCatUtil.getHCatFieldSchemaList(new FieldSchema("atinyint", "tinyint", ""),
                                                          new FieldSchema("aint", "int", ""),
                                                          new FieldSchema("adouble", "double", ""),
                                                          new FieldSchema("astring", "string", ""),
                                                          new FieldSchema("anullint", "int", "")));
  }
View Full Code Here

                                                          new FieldSchema("astring", "string", ""),
                                                          new FieldSchema("anullint", "int", "")));
  }

  private HCatSchema buildReorderedSchema() throws HCatException{
    return new HCatSchema(HCatUtil.getHCatFieldSchemaList(new FieldSchema("aint", "int", ""),
                                                          new FieldSchema("part1", "string", ""),
                                                          new FieldSchema("adouble", "double", ""),
                                                          new FieldSchema("newCol", "tinyint", ""),
                                                          new FieldSchema("astring", "string", ""),
                                                          new FieldSchema("atinyint", "tinyint", ""),
View Full Code Here

  public void testConversion() throws IOException {
    Configuration conf = new Configuration();
    JobContext jc = new JobContext(conf, new JobID());

    HCatSchema schema = buildHiveSchema();
    HCatInputStorageDriver isd = new RCFileInputDriver();

    isd.setOriginalSchema(jc, schema);
    isd.setOutputSchema(jc, schema);
    isd.initialize(jc, new Properties());
View Full Code Here

TOP

Related Classes of org.apache.hcatalog.data.schema.HCatSchema

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and owned by Oracle Inc. Contact coftware@gmail.com.