Package org.apache.hadoop.hive.ql.plan

Examples of org.apache.hadoop.hive.ql.plan.DDLWork
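All of the examples below follow one pattern (they track Hive's DDLSemanticAnalyzer): the analyzer reads the operands of a DDL statement out of its ASTNode, records them in a plan descriptor (descTableDesc, showTablesDesc, alterTableDesc, and so on), wraps the descriptor in a DDLWork, and hands it to TaskFactory, which builds the DDLTask that is appended to rootTasks. Statements that return rows (DESCRIBE, SHOW TABLES, SHOW PARTITIONS) also register a fetch task over the descriptor's schema so the client can read the results back. A minimal sketch of that shared shape (the method name is invented; the fields ctx, rootTasks, and conf belong to the analyzer):

  // Hedged sketch of the pattern the examples share; analyzeSomeDDL is a
  // made-up name, not a Hive method.
  private void analyzeSomeDDL(ASTNode ast) throws SemanticException {
    // 1. pull the statement's operands out of the AST
    String tableName = unescapeIdentifier(ast.getChild(0).getText());
    // 2. record what to do in a plan descriptor
    showPartitionsDesc desc = new showPartitionsDesc(tableName, ctx.getResFile());
    // 3. wrap the descriptor in DDLWork; TaskFactory turns it into a DDLTask
    rootTasks.add(TaskFactory.get(new DDLWork(desc), conf));
    // 4. row-returning statements also attach a fetch task so the client
    //    can read the result file the DDLTask writes
    setFetchTask(createFetchTask(desc.getSchema()));
  }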


  private void analyzeDescribeTable(ASTNode ast) throws SemanticException {
    // ... (tableName and the optional partSpec are parsed from the AST in
    //      lines cut off above) ...

    // DESCRIBE EXTENDED is signalled by an extra child on the AST
    boolean isExt = ast.getChildCount() > 1;
    descTableDesc descTblDesc = new descTableDesc(ctx.getResFile(), tableName, partSpec, isExt);
    rootTasks.add(TaskFactory.get(new DDLWork(descTblDesc), conf));
    // DESCRIBE returns rows, so register a fetch task over the result schema
    setFetchTask(createFetchTask(descTblDesc.getSchema()));
    LOG.info("analyzeDescribeTable done");
  }


  private void analyzeShowPartitions(ASTNode ast)
  throws SemanticException {
    showPartitionsDesc showPartsDesc;
    String tableName = unescapeIdentifier(ast.getChild(0).getText());
    showPartsDesc = new showPartitionsDesc(tableName, ctx.getResFile());
    // the DDL task writes the partition list to resFile for the fetch task
    rootTasks.add(TaskFactory.get(new DDLWork(showPartsDesc), conf));
    setFetchTask(createFetchTask(showPartsDesc.getSchema()));
  }

  private void analyzeShowTables(ASTNode ast) throws SemanticException {
    showTablesDesc showTblsDesc;
    if (ast.getChildCount() == 1) {
      // a LIKE pattern was supplied
      String tableNames = unescapeSQLString(ast.getChild(0).getText());
      showTblsDesc = new showTablesDesc(ctx.getResFile(), tableNames);
    } else {
      showTblsDesc = new showTablesDesc(ctx.getResFile());
    }
    rootTasks.add(TaskFactory.get(new DDLWork(showTblsDesc), conf));
    setFetchTask(createFetchTask(showTblsDesc.getSchema()));
  }

  private void analyzeAlterTableRename(ASTNode ast)
  throws SemanticException {
    // child 0 holds the current table name, child 1 the new name
    alterTableDesc alterTblDesc = new alterTableDesc(
        unescapeIdentifier(ast.getChild(0).getText()),
        unescapeIdentifier(ast.getChild(1).getText()));
    rootTasks.add(TaskFactory.get(new DDLWork(alterTblDesc), conf));
  }

  private void analyzeAlterTableModifyCols(ASTNode ast, alterTableTypes alterType)
  throws SemanticException {
    String tblName = unescapeIdentifier(ast.getChild(0).getText());
    List<FieldSchema> newCols = getColumns((ASTNode)ast.getChild(1));
    // alterType distinguishes ADD COLUMNS from REPLACE COLUMNS
    alterTableDesc alterTblDesc = new alterTableDesc(tblName, newCols, alterType);
    rootTasks.add(TaskFactory.get(new DDLWork(alterTblDesc), conf));
  }

  private void analyzeAlterTableDropParts(ASTNode ast) throws SemanticException {
    String tblName = unescapeIdentifier(ast.getChild(0).getText());
    // collect the partition specs to drop from the AST
    List<Map<String, String>> partSpecs = getPartitionSpecs(ast);
    dropTableDesc dropTblDesc = new dropTableDesc(tblName, partSpecs);
    rootTasks.add(TaskFactory.get(new DDLWork(dropTblDesc), conf));
  }
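
Several of these analyzers lean on getPartitionSpecs(ast), which is not shown on this page. A hedged reconstruction of what such a helper does, inferred from its call sites here (treat the details as an approximation, not Hive's exact code): it walks the TOK_PARTSPEC children of the statement and builds one key-to-value map per PARTITION clause.

  // Approximate reconstruction of getPartitionSpecs, inferred from its call
  // sites above; not copied from Hive source.
  private List<Map<String, String>> getPartitionSpecs(ASTNode ast)
      throws SemanticException {
    List<Map<String, String>> partSpecs = new ArrayList<Map<String, String>>();
    // child 0 is the table name; later children may be TOK_PARTSPEC nodes
    for (int i = 1; i < ast.getChildCount(); i++) {
      ASTNode partSpecNode = (ASTNode) ast.getChild(i);
      if (partSpecNode.getToken().getType() != HiveParser.TOK_PARTSPEC) {
        continue;
      }
      Map<String, String> partSpec = new LinkedHashMap<String, String>();
      // each child of a TOK_PARTSPEC is one (key, value) pair
      for (int j = 0; j < partSpecNode.getChildCount(); j++) {
        ASTNode kv = (ASTNode) partSpecNode.getChild(j);
        partSpec.put(unescapeIdentifier(kv.getChild(0).getText()).toLowerCase(),
            unescapeSQLString(kv.getChild(1).getText()));
      }
      partSpecs.add(partSpec);
    }
    return partSpecs;
  }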

      // (from analyzeAlterTableAddParts: inside a for/switch over the
      //  children of the ALTER TABLE ... ADD PARTITION AST)
      case HiveParser.TOK_PARTSPEC:
        // seeing the next spec means the previous one is complete: emit it
        if (currentPart != null) {
          AddPartitionDesc addPartitionDesc =
            new AddPartitionDesc(MetaStoreUtils.DEFAULT_DATABASE_NAME,
                tblName, currentPart, currentLocation);
          rootTasks.add(TaskFactory.get(new DDLWork(addPartitionDesc), conf));
        }
        // start a new partition spec
        currentLocation = null;
        currentPart = partIter.next();
        break;
      case HiveParser.TOK_PARTITIONLOCATION:
        // a LOCATION clause applies to the partition currently being built
        currentLocation = unescapeSQLString(child.getChild(0).getText());
        break;
      default:
        throw new SemanticException("Unknown child: " + child);
      } // switch
    } // for

    // flush the final spec: the loop only emits when it sees the next one
    if (currentPart != null) {
      AddPartitionDesc addPartitionDesc =
        new AddPartitionDesc(MetaStoreUtils.DEFAULT_DATABASE_NAME,
            tblName, currentPart, currentLocation);
      rootTasks.add(TaskFactory.get(new DDLWork(addPartitionDesc), conf));
    }
  }
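
Note the shape of the loop above: an AddPartitionDesc is emitted only when the next TOK_PARTSPEC is encountered, so the last spec is still pending when the loop exits and the trailing if flushes it. With an illustrative statement (mine, not from this page) such as ALTER TABLE page_view ADD PARTITION (ds='2008-08-08') LOCATION '/tmp/pv/ds=2008-08-08' PARTITION (ds='2008-08-09'), each PARTITION clause yields its own DDLWork, and therefore its own independent DDL task.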

  private void analyzeMetastoreCheck(ASTNode ast) throws SemanticException {
    String tableName = null;
    if (ast.getChildCount() > 0) {
      tableName = unescapeIdentifier(ast.getChild(0).getText());
    }
    List<Map<String, String>> specs = getPartitionSpecs(ast);
    // MSCK: have the metastore checker reconcile table/partition metadata
    MsckDesc checkDesc = new MsckDesc(tableName, specs, ctx.getResFile());
    rootTasks.add(TaskFactory.get(new DDLWork(checkDesc), conf));
  }

      // Clear the outputs for CTAS: the outputs of the mapredWork are not
      // needed, since the DDLWork at the tail of the chain will carry them
      outputs.clear();

      Task<? extends Serializable> crtTblTask = TaskFactory.get(new DDLWork(
          inputs, outputs, crtTblDesc), conf);

      // find all leaf tasks and make the DDLTask as a dependent task of all of
      // them
      HashSet<Task<? extends Serializable>> leaves = new HashSet<Task<? extends Serializable>>();
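The snippet is cut off after the HashSet declaration. A hedged sketch of how the chaining plausibly finishes, assuming a getLeafTasks helper that fills the set with tasks that have no children (its name and signature are guesses, not confirmed Hive API):

      // collect every task with no children (assumed helper, see note above)
      getLeafTasks(rootTasks, leaves);
      // CREATE TABLE must run only after the query tasks finish, so make the
      // DDLTask a dependent of every leaf
      for (Task<? extends Serializable> leaf : leaves) {
        leaf.addDependentTask(crtTblTask);
      }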

    // from analyzeGrantRevokeRole: the authenticated user becomes the owner
    // of the granted role when an authenticator is configured
    String roleOwnerName = "";
    if (SessionState.get() != null
        && SessionState.get().getAuthenticator() != null) {
      roleOwnerName = SessionState.get().getAuthenticator().getUserName();
    }
    GrantRevokeRoleDDL grantRevokeRoleDDL = new GrantRevokeRoleDDL(grant,
        roles, principalDesc, roleOwnerName, PrincipalType.USER, true);
    // this DDLWork constructor also threads through the read/write entities
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
        grantRevokeRoleDDL), conf));
  }
