Examples of ColumnDef


Examples of com.draagon.meta.manager.db.defs.ColumnDef

      if ( table instanceof TableDef ) {
        if ( TRUE.equals( getPersistenceAttribute( mf, IS_VIEWONLY ))) continue;
      }
     
      // Create the column definition
      ColumnDef colDef = new ColumnDef( col, getSQLType( mf ));
     
      // Set the length of the varchar field
      colDef.setLength( mf.getLength() );
     
      // Is it a primary key?
      String key = getPersistenceAttribute( mf , ObjectManager.IS_KEY );
      if ( key != null && key.equalsIgnoreCase( TRUE )) {
        colDef.setPrimaryKey( true );
      }

      // Load extra values if this is a Table Definition
      if ( table instanceof TableDef ) {
       
        // Is it an auto column?
        String auto = getAutoGenerated( mf );
        if ( auto != null ) {
          if ( AUTO_ID.equals( auto )) {
            colDef.setAutoType( ColumnDef.AUTO_ID );
          }
          else if ( ObjectManager.AUTO_CREATE.equals( auto )) {
            colDef.setAutoType( ColumnDef.AUTO_DATE_CREATE );
          }
          else if ( ObjectManager.AUTO_UPDATE.equals( auto )) {
            colDef.setAutoType( ColumnDef.AUTO_DATE_UPDATE );
          }
        }
       
        // Get the sequence if it is defined
        String seq = getSequenceRef( mf );
        if ( seq != null ) {
          int start = getSequenceStart( mf );
          SequenceDef seqDef = new SequenceDef( NameDef.parseName( seq ), start, 1 );
          colDef.setSequence( seqDef );
        }
       
        // Set if it is unique
        colDef.setUnique( isUnique( mf ));
     
        // Check if the column is an index
        if ( isIndex( mf )) {
         
          String name = table.getNameDef().getName() + "_" + col + "_index";
View Full Code Here
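
For orientation, here is a minimal standalone sketch built from the same API calls used in the fragment above. The constructor argument types (a column name plus a java.sql.Types code) and the packages of SequenceDef and NameDef are assumptions inferred from that fragment, not confirmed against the library.

import com.draagon.meta.manager.db.defs.ColumnDef;
import com.draagon.meta.manager.db.defs.NameDef;
import com.draagon.meta.manager.db.defs.SequenceDef;

public class ColumnDefSketch {

  // Builds an auto-generated, unique primary-key column backed by a sequence,
  // using only the setters that appear in the fragment above.
  public static ColumnDef buildIdColumn() {
    // Assumption: the constructor takes a column name and a JDBC SQL type code.
    ColumnDef colDef = new ColumnDef( "id", java.sql.Types.INTEGER );
    colDef.setPrimaryKey( true );
    colDef.setUnique( true );
    colDef.setAutoType( ColumnDef.AUTO_ID );
    // Sequence named "my_table_seq", starting at 1 and incrementing by 1.
    colDef.setSequence( new SequenceDef( NameDef.parseName( "my_table_seq" ), 1, 1 ));
    return colDef;
  }
}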

Examples of com.salesforce.phoenix.parse.ColumnDef

            }
            if (!pkColumnsNames.isEmpty() && pkColumnsNames.size() != pkColumns.size() - positionOffset) { // Then a column name in the primary key constraint wasn't resolved
                Iterator<Pair<ColumnName,ColumnModifier>> pkColumnNamesIterator = pkColumnsNames.iterator();
                while (pkColumnNamesIterator.hasNext()) {
                    ColumnName colName = pkColumnNamesIterator.next().getFirst();
                    ColumnDef colDef = findColumnDefOrNull(colDefs, colName);
                    if (colDef == null) {
                        throw new ColumnNotFoundException(schemaName, tableName, null, colName.getColumnName());
                    }
                    if (colDef.getColumnDefName().getFamilyName() != null) {
                        throw new SQLExceptionInfo.Builder(SQLExceptionCode.PRIMARY_KEY_WITH_FAMILY_NAME)
                        .setSchemaName(schemaName)
                        .setTableName(tableName)
                        .setColumnName(colDef.getColumnDefName().getColumnName() )
                        .setFamilyName(colDef.getColumnDefName().getFamilyName())
                        .build().buildException();
                    }
                }
                // The above should actually find the specific one, but just in case...
                throw new SQLExceptionInfo.Builder(SQLExceptionCode.INVALID_PRIMARY_KEY_CONSTRAINT)
View Full Code Here
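
The findColumnDefOrNull helper referenced above is not shown in the fragment. Below is a hedged, illustrative stand-in that relies only on the getColumnDefName()/getColumnName() accessors that do appear; it is not the Phoenix implementation.

import java.util.List;

import com.salesforce.phoenix.parse.ColumnDef;
import com.salesforce.phoenix.parse.ColumnName;

final class ColumnDefLookup {

    // Returns the ColumnDef whose unqualified column name matches, or null if none does.
    // Family-qualified names are ignored here for simplicity.
    static ColumnDef findColumnDefOrNull(List<ColumnDef> colDefs, ColumnName colName) {
        for (ColumnDef colDef : colDefs) {
            if (colDef.getColumnDefName().getColumnName().equals(colName.getColumnName())) {
                return colDef;
            }
        }
        return null;
    }
}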

Examples of com.sap.hadoop.windowing.query2.definition.ColumnDef

    InputInfo iInfo = tInfo.getInputInfo(iDef);
    SelectDef selectDef = qDef.getSelectList();
    SelectSpec selectSpec = qDef.getSpec().getSelectList();
    Iterator<Object> selectExprsAndAliases = selectSpec.getColumnListAndAlias();
    int i = 0;
    ColumnDef cDef = null;
   
   
    while(selectExprsAndAliases.hasNext())
    {
      Object[] o = (Object[]) selectExprsAndAliases.next();
View Full Code Here

Examples of com.sap.hadoop.windowing.query2.definition.ColumnDef

 

  public static ColumnDef translateSelectExpr(QueryDef qDef, InputInfo iInfo, int colIdx, String alias, ASTNode expr)
    throws WindowingException
  {
    ColumnDef cDef = new ColumnDef((ColumnSpec) null);
    ExprNodeDesc exprNode = TranslateUtils.buildExprNode(expr, iInfo.getTypeCheckCtx());
    ExprNodeEvaluator exprEval = WindowingExprNodeEvaluatorFactory.get(qDef.getTranslationInfo(), exprNode);
    ObjectInspector oi = TranslateUtils.initExprNodeEvaluator(qDef, exprNode, exprEval, iInfo);
   
    cDef.setExpression(expr);
    cDef.setExprNode(exprNode);
    cDef.setExprEvaluator(exprEval);
    cDef.setOI(oi);
   
    cDef.setAlias(getAlias(alias, expr, colIdx));
   
    return cDef;
  }
View Full Code Here
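
A translated ColumnDef carries the AST expression, its ExprNodeDesc, an ExprNodeEvaluator, and an ObjectInspector. The sketch below shows one plausible way a caller could evaluate such a column against an input row; getExprEvaluator() is an assumed getter paired with the setExprEvaluator(...) call above, while getOI() appears elsewhere in these fragments.

import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;

import com.sap.hadoop.windowing.query2.definition.ColumnDef;

final class ColumnEvalSketch {

  // Evaluates the column's expression against a row and copies the result into a
  // standalone writable object so it does not alias the evaluator's internal buffers.
  static Object evaluate(ColumnDef cDef, Object inputRow) throws HiveException {
    ExprNodeEvaluator eval = cDef.getExprEvaluator(); // assumed getter matching setExprEvaluator
    ObjectInspector oi = cDef.getOI();
    Object value = eval.evaluate(inputRow);
    return ObjectInspectorUtils.copyToStandardObject(value, oi, ObjectInspectorCopyOption.WRITABLE);
  }
}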

Examples of com.sap.hadoop.windowing.query2.definition.ColumnDef

    ObjectInspector[] keyOIs = new ObjectInspector[numCols];
    ObjectInspector[] currentKeyOIs = new ObjectInspector[numCols];
   
    for(int i=0; i<numCols; i++)
    {
      ColumnDef cDef = cols.get(i);
      /*
       * Why cannot we just use the ExprNodeEvaluator on the column?
       * - because on the reduce-side it is initialized based on the rowOI of the HiveTable
       *   and not the OI of the ExtractOp ( the parent of this Operator on the reduce-side)
       */
      keyFields[i] = ExprNodeEvaluatorFactory.get(cDef.getExprNode());
      keyOIs[i] = keyFields[i].initialize(inputOI);
      currentKeyOIs[i] = ObjectInspectorUtils.getStandardObjectInspector(keyOIs[i], ObjectInspectorCopyOption.WRITABLE);
    }
   
    keyWrapperFactory = new WindowingKeyWrapperFactory(keyFields, keyOIs, currentKeyOIs);
View Full Code Here

Examples of com.sap.hadoop.windowing.query2.definition.ColumnDef

      return null;
   
    PartitionDef pDef = new PartitionDef(spec);
    for(ColumnSpec colSpec : spec.getColumns())
    {
      ColumnDef cDef = translatePartitionColumn(qDef, iInfo, colSpec);
      pDef.addColumn(cDef);
    }
    return pDef;
  }
View Full Code Here

Examples of com.sap.hadoop.windowing.query2.definition.ColumnDef

  }


  static ColumnDef translatePartitionColumn(QueryDef qDef, InputInfo iInfo, ColumnSpec cSpec) throws WindowingException
  {
    ColumnDef cDef = new ColumnDef(cSpec);
    translateColumn(qDef, cDef, iInfo,  cSpec);
    TranslateUtils.validateComparable(cDef.getOI(), sprintf("Partition Column %s is not comparable", cSpec));
    return cDef;
  }
View Full Code Here

Examples of org.apache.cassandra.db.migration.avro.ColumnDef

                cfDef.column_metadata.add(column.toAvro());
                break;

            case DROP:
                ColumnDef toDelete = null;

                for (ColumnDef columnDef : cfDef.column_metadata)
                {
                    if (columnDef.name.equals(columnName))
                    {
View Full Code Here

Examples of org.apache.cassandra.db.migration.avro.ColumnDef

                cfDef.column_metadata.add(column.deflate());
                break;

            case DROP:
                ColumnDef toDelete = null;

                for (ColumnDef columnDef : cfDef.column_metadata)
                {
                    if (columnDef.name.equals(columnName))
                    {
View Full Code Here
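
The DROP branch is cut off above. A hedged sketch of how it might complete follows, assuming column_metadata supports java.util.List operations (the add(...) call above suggests it does) and that a CfDef class sits alongside ColumnDef in the same avro package.

import java.nio.ByteBuffer;

import org.apache.cassandra.db.migration.avro.CfDef;
import org.apache.cassandra.db.migration.avro.ColumnDef;

final class DropColumnSketch {

    // Finds the column definition whose name matches and removes it outside the loop,
    // mirroring the toDelete pattern started in the fragment above.
    static boolean dropColumn(CfDef cfDef, ByteBuffer columnName) {
        ColumnDef toDelete = null;
        for (ColumnDef columnDef : cfDef.column_metadata) {
            if (columnDef.name.equals(columnName)) {
                toDelete = columnDef; // remember the match instead of removing mid-iteration
                break;
            }
        }
        return toDelete != null && cfDef.column_metadata.remove(toDelete);
    }
}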

Examples of org.apache.cassandra.thrift.ColumnDef

        // we'll be adding this one later. make sure it's not already there.
        assert cfm.getColumn_metadata().get(ByteBuffer.wrap(new byte[] { 5 })) == null;
        CfDef cfDef = cfm.toThrift();
       
        // add one.
        ColumnDef addIndexDef = new ColumnDef();
        addIndexDef.index_name = "5";
        addIndexDef.index_type = IndexType.KEYS;
        addIndexDef.name = ByteBuffer.wrap(new byte[] { 5 });
        addIndexDef.validation_class = BytesType.class.getName();
        cfDef.column_metadata.add(addIndexDef);
       
        // remove one.
        ColumnDef removeIndexDef = new ColumnDef();
        removeIndexDef.index_name = "0";
        removeIndexDef.index_type = IndexType.KEYS;
        removeIndexDef.name = ByteBuffer.wrap(new byte[] { 0 });
        removeIndexDef.validation_class = BytesType.class.getName();
        assert cfDef.column_metadata.remove(removeIndexDef);
View Full Code Here
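
The same column definition can be built more compactly with the Thrift-generated required-fields constructor and chained setters, which are assumed here to exist as Thrift normally generates them.

import java.nio.ByteBuffer;

import org.apache.cassandra.db.marshal.BytesType;
import org.apache.cassandra.thrift.ColumnDef;
import org.apache.cassandra.thrift.IndexType;

final class ThriftColumnDefSketch {

    // Builds a KEYS-indexed column definition equivalent to addIndexDef in the test above.
    static ColumnDef keysIndexedColumn(byte columnId) {
        // Assumption: ColumnDef(ByteBuffer name, String validation_class) is the generated
        // required-fields constructor, matching the fields set explicitly in the test.
        return new ColumnDef(ByteBuffer.wrap(new byte[] { columnId }), BytesType.class.getName())
                .setIndex_type(IndexType.KEYS)
                .setIndex_name(String.valueOf(columnId));
    }
}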