Package org.eigenbase.sql

Examples of org.eigenbase.sql.SqlIdentifier
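SqlIdentifier represents a simple name ("EMP") or a compound, dot-separated name ("HR"."EMP") in a SQL parse tree. Before the extracted examples below, here is a minimal, self-contained sketch of the two constructors those examples rely on most; the identifier values are made up for illustration.

import java.util.List;

import com.google.common.collect.ImmutableList;
import org.eigenbase.sql.SqlIdentifier;
import org.eigenbase.sql.parser.SqlParserPos;
import org.eigenbase.util.Util;

public class SqlIdentifierDemo {
  public static void main(String[] args) {
    // A simple identifier and a compound (schema-qualified) one; the names are made up.
    SqlIdentifier simple = new SqlIdentifier("EMP", SqlParserPos.ZERO);
    SqlIdentifier compound = new SqlIdentifier(ImmutableList.of("HR", "EMP"), SqlParserPos.ZERO);

    // The component names are exposed as a list, which the examples below rely on.
    List<String> parts = compound.names;       // ["HR", "EMP"]
    String leaf = Util.last(compound.names);   // "EMP"
    System.out.println(simple + " | " + parts + " | " + leaf);
  }
}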


  @Override
  public SqlNode rewrite(SqlNode sqlNode) throws RelConversionException {
    SqlDescribeTable node = unwrap(sqlNode, SqlDescribeTable.class);

    try {
      List<SqlNode> selectList = ImmutableList.of((SqlNode) new SqlIdentifier("COLUMN_NAME", SqlParserPos.ZERO),
          new SqlIdentifier("DATA_TYPE", SqlParserPos.ZERO),
          new SqlIdentifier("IS_NULLABLE", SqlParserPos.ZERO));

      SqlNode fromClause = new SqlIdentifier(
          ImmutableList.of("INFORMATION_SCHEMA", "COLUMNS"), null, SqlParserPos.ZERO, null);

      final SqlIdentifier table = node.getTable();
      final SchemaPlus schema = findSchema(context.getRootSchema(), context.getNewDefaultSchema(),
          Util.skipLast(table.names));
      final String tableName = Util.last(table.names);

      if (schema.getTable(tableName) == null) {
        throw new RelConversionException(String.format("Table %s is not valid", Util.sepList(table.names, ".")));
      }

      SqlNode schemaCondition = null;
      if (!isRootSchema(schema)) {
        AbstractSchema drillSchema = getDrillSchema(schema);

        schemaCondition = DrillParserUtil.createCondition(
            new SqlIdentifier("TABLE_SCHEMA", SqlParserPos.ZERO),
            SqlStdOperatorTable.EQUALS,
            SqlLiteral.createCharString(drillSchema.getFullSchemaName(), CHARSET, SqlParserPos.ZERO)
        );
      }

      SqlNode where = DrillParserUtil.createCondition(
          new SqlIdentifier("TABLE_NAME", SqlParserPos.ZERO),
          SqlStdOperatorTable.EQUALS,
          SqlLiteral.createCharString(tableName, CHARSET, SqlParserPos.ZERO));

      where = DrillParserUtil.createCondition(schemaCondition, SqlStdOperatorTable.AND, where);

      SqlNode columnFilter = null;
      if (node.getColumn() != null) {
        columnFilter = DrillParserUtil.createCondition(new SqlIdentifier("COLUMN_NAME", SqlParserPos.ZERO),
            SqlStdOperatorTable.EQUALS,
            SqlLiteral.createCharString(node.getColumn().toString(), CHARSET, SqlParserPos.ZERO));
      } else if (node.getColumnQualifier() != null) {
        columnFilter = DrillParserUtil.createCondition(new SqlIdentifier("COLUMN_NAME", SqlParserPos.ZERO),
            SqlStdOperatorTable.LIKE, node.getColumnQualifier());
      }

      where = DrillParserUtil.createCondition(where, SqlStdOperatorTable.AND, columnFilter);
      // ... (the selectList, fromClause and where are assembled into the final
      // SELECT over INFORMATION_SCHEMA.COLUMNS here; exception handling omitted)
  }
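Note that both schemaCondition and columnFilter may still be null when they reach DrillParserUtil.createCondition, so that helper evidently returns the non-null operand when the other side is missing. Below is a minimal sketch of such a null-tolerant builder, assuming the varargs SqlOperator.createCall(SqlParserPos, SqlNode...) overload; this is not Drill's actual implementation.

  // Hypothetical null-tolerant condition builder in the spirit of DrillParserUtil.createCondition.
  public static SqlNode createCondition(SqlNode left, SqlOperator op, SqlNode right) {
    if (left == null) {
      return right;  // nothing on the left, keep the right-hand predicate as-is
    }
    if (right == null) {
      return left;   // and vice versa
    }
    return op.createCall(SqlParserPos.ZERO, left, right);
  }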

  @Override
  public PhysicalPlan getPlan(SqlNode sqlNode) throws ValidationException, RelConversionException, IOException {

    SqlIdentifier from = ((SqlShowFiles) sqlNode).getDb();

    DrillFileSystem fs = null;
    String defaultLocation = null;
    String fromDir = "./";

    try {
      SchemaPlus defaultSchema = context.getNewDefaultSchema();
      SchemaPlus drillSchema = defaultSchema;

      // SHOW FILES can be used without a FROM clause, in which case we list the files in the default schema.
      if (from != null) {
        // The FROM clause may name just a schema, or a schema plus a trailing directory; first try to
        // resolve the full path as a schema.
        try {
          drillSchema = findSchema(context.getRootSchema(), defaultSchema, from.names);
        } catch (Exception e) {
          // The full path is not a schema: resolve everything but the last component as the schema and
          // treat the last component as the directory to list.
          drillSchema = findSchema(context.getRootSchema(), defaultSchema, from.names.subList(0, from.names.size() - 1));
          fromDir = fromDir + from.names.get(from.names.size() - 1);
        }
      }

      AbstractSchema tempSchema = getDrillSchema(drillSchema);
      WorkspaceSchema schema = null;
      if (tempSchema instanceof WorkspaceSchema) {
        schema = ((WorkspaceSchema)tempSchema);
      } else {
        throw new ValidationException("Unsupported schema");
      }

      // Get the file system object
      fs = schema.getFS();

      // Get the default path
      defaultLocation = schema.getDefaultLocation();
    } catch (Exception e) {
      if (from == null) {
        return DirectPlan.createDirectPlan(context, false, "Show files without FROM / IN clause can be used only after specifying a default file system schema");
      }
      return DirectPlan.createDirectPlan(context, false, String.format("Current schema '%s' is not a file system schema. " +
          "Can't execute show files on this schema.", from.toString()));
    }

    List<ShowFilesCommandResult> rows = new ArrayList<>();

    for (FileStatus fileStatus : fs.list(false, new Path(defaultLocation, fromDir))) {
        // ... (a result row is added for each FileStatus; rest of getPlan() omitted)
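The schema-resolution fallback above leans directly on the identifier's names list: every component but the last is treated as the schema path, and the last component as the directory to list. An illustrative split (the dfs.tmp.users path is made up):

  // Illustrative only: splitting a compound identifier into schema path + trailing directory.
  SqlIdentifier from = new SqlIdentifier(ImmutableList.of("dfs", "tmp", "users"), SqlParserPos.ZERO);
  List<String> schemaPath = from.names.subList(0, from.names.size() - 1);  // ["dfs", "tmp"]
  String dir = from.names.get(from.names.size() - 1);                      // "users"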

public class DrillSqlOperator extends SqlFunction {
  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DrillSqlOperator.class);

  public DrillSqlOperator(String name, int argCount) {
    super(new SqlIdentifier(name, SqlParserPos.ZERO), DynamicReturnType.INSTANCE, null, new Checker(argCount), null, SqlFunctionCategory.USER_DEFINED_FUNCTION);
  }
  // ... (rest of the class omitted)
}
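DrillSqlOperator wraps the function name in a SqlIdentifier so the planner can resolve it like any other SQL function. A hypothetical usage sketch (the function name MY_UDF and its operand are made up; the varargs createCall(SqlParserPos, SqlNode...) overload is assumed):

  // Hypothetical usage: build a call to a one-argument user-defined function.
  DrillSqlOperator myUdf = new DrillSqlOperator("MY_UDF", 1);
  SqlNode arg = new SqlIdentifier("N_NAME", SqlParserPos.ZERO);
  SqlNode call = myUdf.createCall(SqlParserPos.ZERO, arg);   // MY_UDF(N_NAME)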

public class DrillSqlAggOperator extends SqlAggFunction {
  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DrillSqlAggOperator.class);

 
  public DrillSqlAggOperator(String name, int argCount) {
    super(name, new SqlIdentifier(name, SqlParserPos.ZERO), SqlKind.OTHER_FUNCTION, DynamicReturnType.INSTANCE, null, new Checker(argCount), SqlFunctionCategory.USER_DEFINED_FUNCTION);
  }
  // ... (rest of the class omitted)
}

    List<SqlNode> selectList = Lists.newArrayList();
    SqlNode fromClause;
    SqlNode where;

    // create select columns
    selectList.add(new SqlIdentifier("TABLE_SCHEMA", SqlParserPos.ZERO));
    selectList.add(new SqlIdentifier("TABLE_NAME", SqlParserPos.ZERO));

    fromClause = new SqlIdentifier(ImmutableList.of("INFORMATION_SCHEMA", "TABLES"), SqlParserPos.ZERO);

    final SqlIdentifier db = node.getDb();
    String tableSchema;
    if (db != null) {
      tableSchema = db.toString();
    } else {
      // If no schema is given in SHOW TABLES command, list tables from current schema
      SchemaPlus schema = context.getNewDefaultSchema();

      if (isRootSchema(schema)) {
        // If the default schema is a root schema, throw an error to select a default schema
        throw new RelConversionException("No schema selected. Select a schema using 'USE schema' command");
      }

      AbstractSchema drillSchema;

      try {
        drillSchema = getDrillSchema(schema);
      } catch(Exception ex) {
        throw new RelConversionException("Error while rewriting SHOW TABLES query: " + ex.getMessage(), ex);
      }

      tableSchema = drillSchema.getFullSchemaName();
    }

    where = DrillParserUtil.createCondition(
        new SqlIdentifier("TABLE_SCHEMA", SqlParserPos.ZERO),
        SqlStdOperatorTable.EQUALS,
        SqlLiteral.createCharString(tableSchema, CHARSET, SqlParserPos.ZERO));

    SqlNode filter = null;
    final SqlNode likePattern = node.getLikePattern();
    if (likePattern != null) {
      filter = DrillParserUtil.createCondition(
          new SqlIdentifier("TABLE_NAME", SqlParserPos.ZERO),
          SqlStdOperatorTable.LIKE,
          likePattern);
    } else if (node.getWhereClause() != null) {
      filter = node.getWhereClause();
    }
    // ... (rest of the SHOW TABLES rewrite omitted)
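The omitted remainder presumably ANDs the mandatory TABLE_SCHEMA predicate with the optional filter and wraps selectList, fromClause and where into a SELECT over INFORMATION_SCHEMA.TABLES. A hedged sketch of that combination step, reusing the null-tolerant helper sketched earlier:

    // Assumed continuation: filter may be null, in which case the schema predicate is kept unchanged.
    where = DrillParserUtil.createCondition(where, SqlStdOperatorTable.AND, filter);
    // ... selectList, fromClause and where are then assembled into the final SELECT node.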


  public SqlNode getAsSqlNode() {
    if (ids.size() == 1) {
      return new SqlIdentifier(Collections.singletonList(ids.get(0).value), ids.get(0).parserPos);
    }

    int startIndex;
    SqlNode node;

    if (ids.get(1).isArray()) {
      // Only the first part is a plain identifier; everything from index 1 onward is handled as ITEM operator accesses.
      startIndex = 1;
      node = new SqlIdentifier(
          ImmutableList.of(ids.get(0).value),
          null,
          ids.get(0).parserPos,
          ImmutableList.of(ids.get(0).parserPos));
    } else {
      // The first two parts form a compound identifier; everything from index 2 onward is handled as ITEM operator accesses.
      startIndex = 2;
      node = new SqlIdentifier(
          ImmutableList.of(ids.get(0).value, ids.get(1).value),
          null,
          ids.get(0).parserPos,
          ImmutableList.of(ids.get(0).parserPos, ids.get(1).parserPos));
    }
    // ... (rest of getAsSqlNode() omitted)

    // Collect the component names and parser positions of every identifier part.
    for (int i = 0; i < ids.size(); i++) {
      IdentifierHolder holder = ids.get(i);
      names.add(holder.value);
      pos.add(holder.parserPos);
    }
    return new SqlIdentifier(names, null, pos.get(0), pos);
  }
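The return statement above uses the four-argument constructor: the component names, an optional collation (null here), the position of the whole identifier, and a list of per-component positions (which other examples on this page pass as null). A compact illustrative call:

  // Illustrative only: compound identifier with explicit per-component positions.
  List<String> names = ImmutableList.of("INFORMATION_SCHEMA", "COLUMNS");
  List<SqlParserPos> positions = ImmutableList.of(SqlParserPos.ZERO, SqlParserPos.ZERO);
  SqlIdentifier id = new SqlIdentifier(names, null, positions.get(0), positions);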

  /** Rewrite the parse tree as SELECT ... FROM INFORMATION_SCHEMA.SCHEMATA ... */
  @Override
  public SqlNode rewrite(SqlNode sqlNode) throws RelConversionException {
    SqlShowSchemas node = unwrap(sqlNode, SqlShowSchemas.class);
    List<SqlNode> selectList = ImmutableList.of((SqlNode) new SqlIdentifier("SCHEMA_NAME", SqlParserPos.ZERO));

    SqlNode fromClause = new SqlIdentifier(
        ImmutableList.of("INFORMATION_SCHEMA", "SCHEMATA"), null, SqlParserPos.ZERO, null);

    SqlNode where = null;
    final SqlNode likePattern = node.getLikePattern();
    if (likePattern != null) {
      where = DrillParserUtil.createCondition(new SqlIdentifier("SCHEMA_NAME", SqlParserPos.ZERO),
          SqlStdOperatorTable.LIKE, likePattern);
    } else if (node.getWhereClause() != null) {
      where = node.getWhereClause();
    }

    // ... (the selectList, fromClause and where are assembled into the final
    // SELECT over INFORMATION_SCHEMA.SCHEMATA here)
  }

import org.eigenbase.sql.validate.SqlValidatorScope;

public class HiveUDFOperator extends SqlFunction {

  public HiveUDFOperator(String name) {
    super(new SqlIdentifier(name, SqlParserPos.ZERO), DynamicReturnType.INSTANCE, null, new ArgChecker(), null,
        SqlFunctionCategory.USER_DEFINED_FUNCTION);
  }
  // ... (rest of the class omitted)
}

