Examples of DBConfiguration


Examples of com.cloudera.sqoop.mapreduce.db.DBConfiguration

  /**
   * Initializes the map-part of the job with the appropriate input settings.
   */
  public static void setInput(Job job,
      Class<? extends DBWritable> inputClass,
      String inputQuery, String inputCountQuery) {
    job.setInputFormatClass(DBInputFormat.class);
    DBConfiguration dbConf = new DBConfiguration(job.getConfiguration());
    dbConf.setInputClass(inputClass);
    dbConf.setInputQuery(inputQuery);
    dbConf.setInputCountQuery(inputCountQuery);
  }
View Full Code Here
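
A minimal driver sketch showing how this setInput overload is typically paired with DBConfiguration.configureDB. The driver class, connection URL, credentials, and the OrderRecord DBWritable are hypothetical placeholders, not part of the snippet above (a minimal sketch of such a record class appears further below).

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
import com.cloudera.sqoop.mapreduce.db.DBInputFormat;

public class QueryImportDriver {
  public static void main(String[] args) throws Exception {
    Job job = new Job(new Configuration());

    // Register the JDBC driver class, connection URL and credentials
    // (all placeholder values).
    DBConfiguration.configureDB(job.getConfiguration(),
        "com.mysql.jdbc.Driver",
        "jdbc:mysql://dbhost/sales", "dbuser", "dbpassword");

    // OrderRecord is a hypothetical DBWritable describing one result row.
    DBInputFormat.setInput(job, OrderRecord.class,
        "SELECT id, total FROM orders",      // inputQuery
        "SELECT COUNT(id) FROM orders");     // inputCountQuery
  }
}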

Examples of com.cloudera.sqoop.mapreduce.db.DBConfiguration

    StringBuilder query = new StringBuilder();

    DataDrivenDBInputFormat.DataDrivenDBInputSplit dataSplit =
        (DataDrivenDBInputFormat.DataDrivenDBInputSplit) getSplit();

    DBConfiguration dbConf = getDBConf();
    String [] fieldNames = getFieldNames();
    String tableName = getTableName();
    String conditions = getConditions();

    // Build the WHERE clauses associated with the data split first.
    // We need them in both branches of this function.
    StringBuilder conditionClauses = new StringBuilder();
    conditionClauses.append("( ").append(dataSplit.getLowerClause());
    conditionClauses.append(" ) AND ( ").append(dataSplit.getUpperClause());
    conditionClauses.append(" )");

    if (dbConf.getInputQuery() == null) {
      // We need to generate the entire query.
      query.append("SELECT ");

      for (int i = 0; i < fieldNames.length; i++) {
        query.append(fieldNames[i]);
        if (i != fieldNames.length - 1) {
          query.append(", ");
        }
      }

      query.append(" FROM ").append(tableName);

      String tableHints =
        dbConf.getConf().get(SQLServerManager.TABLE_HINTS_PROP);
      if (tableHints != null) {
        LOG.info("Using table hints: " + tableHints);
        query.append(" WITH (").append(tableHints).append(")");
      }

      query.append(" WHERE ");
      if (conditions != null && conditions.length() > 0) {
        // Put the user's conditions first.
        query.append("( ").append(conditions).append(" ) AND ");
      }

      // Now append the conditions associated with our split.
      query.append(conditionClauses.toString());

    } else {
      // User provided the query. We replace the special token with
      // our WHERE clause.
      String inputQuery = dbConf.getInputQuery();
      if (inputQuery.indexOf(DataDrivenDBInputFormat.SUBSTITUTE_TOKEN) == -1) {
        LOG.error("Could not find the clause substitution token "
            + DataDrivenDBInputFormat.SUBSTITUTE_TOKEN + " in the query: ["
            + inputQuery + "]. Parallel splits may not work correctly.");
      }
View Full Code Here
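
In the user-supplied-query branch above, the substitution token (SUBSTITUTE_TOKEN, the literal string "$CONDITIONS") is replaced with the split's bounding clauses. A small sketch of that substitution with hypothetical query and clause values:

import com.cloudera.sqoop.mapreduce.db.DataDrivenDBInputFormat;

public class TokenSubstitutionSketch {
  public static void main(String[] args) {
    // Hypothetical user query and split clauses.
    String inputQuery = "SELECT id, total FROM orders WHERE $CONDITIONS";
    String conditionClauses = "( id >= 0 ) AND ( id < 5000 )";

    String query = inputQuery.replace(
        DataDrivenDBInputFormat.SUBSTITUTE_TOKEN, conditionClauses);

    // Prints: SELECT id, total FROM orders WHERE ( id >= 0 ) AND ( id < 5000 )
    System.out.println(query);
  }
}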

Examples of com.cloudera.sqoop.mapreduce.db.DBConfiguration

   * clause conditions provided as parameters.
   * This is needed for recovering from connection failures after some data
   * in the split has already been processed. */
  protected String getSelectQuery(String lowerClause, String upperClause) {
    StringBuilder query = new StringBuilder();
    DBConfiguration dbConf = getDBConf();
    String [] fieldNames = getFieldNames();
    String tableName = getTableName();
    String conditions = getConditions();

    // Build the WHERE clauses associated with the data split first.
    // We need them in both branches of this function.
    StringBuilder conditionClauses = new StringBuilder();
    conditionClauses.append("( ").append(lowerClause);
    conditionClauses.append(" ) AND ( ").append(upperClause);
    conditionClauses.append(" )");

    if (dbConf.getInputQuery() == null) {
      // We need to generate the entire query.
      query.append("SELECT ");

      for (int i = 0; i < fieldNames.length; i++) {
        query.append(fieldNames[i]);
        if (i != fieldNames.length - 1) {
          query.append(", ");
        }
      }

      query.append(" FROM ").append(tableName);
      if (!dbProductName.startsWith("ORACLE")
          && !dbProductName.startsWith("DB2")
          && !dbProductName.startsWith("MICROSOFT SQL SERVER")
          && !dbProductName.startsWith("POSTGRESQL")) {
        // The AS clause is required for hsqldb. Some other databases might have
        // issues with it, so we're skipping some of them.
        query.append(" AS ").append(tableName);
      }
      query.append(" WHERE ");
      if (conditions != null && conditions.length() > 0) {
        // Put the user's conditions first.
        query.append("( ").append(conditions).append(" ) AND ");
      }

      // Now append the conditions associated with our split.
      query.append(conditionClauses.toString());

    } else {
      // User provided the query. We replace the special token with
      // our WHERE clause.
      String inputQuery = dbConf.getInputQuery();
      if (inputQuery.indexOf(DataDrivenDBInputFormat.SUBSTITUTE_TOKEN) == -1) {
        LOG.error("Could not find the clause substitution token "
            + DataDrivenDBInputFormat.SUBSTITUTE_TOKEN + " in the query: ["
            + inputQuery + "]. Parallel splits may not work correctly.");
      }
View Full Code Here
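
For illustration, with hypothetical inputs fieldNames = {"id", "total"}, tableName = "orders", conditions = "total > 0", lowerClause = "id >= 0", and upperClause = "id < 5000", the generated-query branch above produces the following on a database that accepts the AS alias (e.g. hsqldb):

// Expected result of the generated-query branch for the hypothetical inputs.
String expectedQuery =
    "SELECT id, total FROM orders AS orders"
    + " WHERE ( total > 0 ) AND ( id >= 0 ) AND ( id < 5000 )";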

Examples of com.cloudera.sqoop.mapreduce.db.DBConfiguration

  @Override
  /** {@inheritDoc} */
  protected RecordReader<LongWritable, T> createDBRecordReader(
      DBInputSplit split, Configuration conf) throws IOException {

    DBConfiguration dbConf = getDBConf();
    Class<T> inputClass = (Class<T>) (dbConf.getInputClass());
    String dbProductName = getDBProductName();
    LOG.debug("Creating db record reader for db product: " + dbProductName);

    try {
      return new SQLServerDBRecordReader<T>(split, inputClass,
          conf, getConnection(), dbConf, dbConf.getInputConditions(),
          dbConf.getInputFieldNames(), dbConf.getInputTableName(),
          dbProductName);
    } catch (SQLException ex) {
      throw new IOException(ex);
    }
  }
View Full Code Here
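
The inputClass obtained from dbConf.getInputClass() above implements DBWritable; Sqoop normally code-generates it, but a minimal hand-written sketch (a hypothetical OrderRecord with two columns) looks like this:

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.lib.db.DBWritable;

public class OrderRecord implements Writable, DBWritable {
  private long id;
  private double total;

  @Override // map a JDBC result row to fields
  public void readFields(ResultSet rs) throws SQLException {
    id = rs.getLong(1);
    total = rs.getDouble(2);
  }

  @Override // bind fields to an INSERT/UPDATE statement
  public void write(PreparedStatement ps) throws SQLException {
    ps.setLong(1, id);
    ps.setDouble(2, total);
  }

  @Override // Hadoop serialization of intermediate data
  public void write(DataOutput out) throws IOException {
    out.writeLong(id);
    out.writeDouble(total);
  }

  @Override
  public void readFields(DataInput in) throws IOException {
    id = in.readLong();
    total = in.readDouble();
  }
}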

Examples of com.cloudera.sqoop.mapreduce.db.DBConfiguration

        if (inputBoundingQuery == null) {
          inputBoundingQuery = buildBoundaryQuery(splitByCol, sanitizedQuery);
        }
        DataDrivenDBInputFormat.setInput(job, DBWritable.class,
            inputQuery, inputBoundingQuery);
        new DBConfiguration(job.getConfiguration()).setInputOrderBy(
            splitByCol);
      }
      if (options.getRelaxedIsolation()) {
        LOG.info("Enabling relaxed (read uncommitted) transaction "
View Full Code Here
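
When the user supplies no boundary query, buildBoundaryQuery derives one over the split-by column so that DataDrivenDBInputFormat can compute split ranges from its MIN and MAX. A hypothetical illustration of the values that end up in the two setInput arguments above (the query and table are placeholders, and the exact shape of the derived bounding query may differ):

// Hypothetical free-form query import.
String splitByCol = "id";
String inputQuery = "SELECT id, total FROM orders WHERE $CONDITIONS";
String inputBoundingQuery =
    "SELECT MIN(id), MAX(id) FROM (SELECT id, total FROM orders) t1";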

Examples of org.apache.hadoop.mapred.lib.db.DBConfiguration

  @SuppressWarnings("unchecked")
  @Test (timeout = 5000)
  public void testDBRecordReader() throws Exception {

    JobConf job = mock(JobConf.class);
    DBConfiguration dbConfig = mock(DBConfiguration.class);
    String[] fields = { "field1", "filed2" };

    @SuppressWarnings("rawtypes")
    DBRecordReader reader = new DBInputFormat<NullDBWritable>().new DBRecordReader(
        new DBInputSplit(),  NullDBWritable.class, job,
View Full Code Here

Examples of org.apache.sqoop.mapreduce.db.DBConfiguration

  @Override
  public void runExport() throws ExportException, IOException {
    ConnManager cmgr = context.getConnManager();
    SqoopOptions options = context.getOptions();
    Configuration conf = options.getConf();
    DBConfiguration dbConf = null;
    String outputTableName = context.getTableName();
    String tableName = outputTableName;
    String tableClassName =
        new TableClassName(options).getClassForTable(outputTableName);

    LOG.info("Beginning export of " + outputTableName);
    loadJars(conf, context.getJarFile(), tableClassName);

    try {
      Job job = new Job(conf);
      dbConf = new DBConfiguration(job.getConfiguration());
      dbConf.setOutputTableName(tableName);
      configureInputFormat(job, tableName, tableClassName, null);
      configureOutputFormat(job, tableName, tableClassName);
      configureNumTasks(job);
      propagateOptionsToJob(job);
      job.setMapperClass(getMapperClass());
View Full Code Here

Examples of org.apache.sqoop.mapreduce.db.DBConfiguration

  protected void setup(Context context)
      throws IOException, InterruptedException {
    super.setup(context);
    conf = context.getConfiguration();
    dbConf = new DBConfiguration(conf);
    tableName = dbConf.getOutputTableName();
    tmpTableName = tableName + "_" + context.getTaskAttemptID().toString();

    Connection conn = null;
    try {
View Full Code Here

Examples of org.apache.sqoop.mapreduce.db.DBConfiguration


  protected void setup(Context context)
      throws IOException, InterruptedException {
    conf = context.getConfiguration();
    dbConf = new DBConfiguration(conf);
    tableName = dbConf.getOutputTableName();
    try {
      conn = dbConf.getConnection();
      conn.setAutoCommit(false);
    } catch (ClassNotFoundException ex) {
View Full Code Here

Examples of org.apache.sqoop.mapreduce.db.DBConfiguration

  @Override
  /** {@inheritDoc} */
  public void checkOutputSpecs(JobContext context)
      throws IOException, InterruptedException {
    Configuration conf = context.getConfiguration();
    DBConfiguration dbConf = new DBConfiguration(conf);

    // Sanity check all the configuration values we need.
    if (null == conf.get(DBConfiguration.URL_PROPERTY)) {
      throw new IOException("Database connection URL is not set.");
    } else if (null == dbConf.getOutputTableName()) {
      throw new IOException("Procedure name is not set for export");
    } else if (null == dbConf.getOutputFieldNames()
        && 0 == dbConf.getOutputFieldCount()) {
      throw new IOException(
          "Output field names are null and zero output field count set.");
    }
  }
View Full Code Here
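
A minimal configuration sketch that would satisfy the three checks above. The connection URL, procedure name, and column names are placeholders, and it assumes the Hadoop-style setters (setOutputTableName, setOutputFieldNames, setOutputFieldCount) mirroring the getters used in the snippet are available on this DBConfiguration copy.

import org.apache.hadoop.conf.Configuration;
import org.apache.sqoop.mapreduce.db.DBConfiguration;

public class ExportConfigSketch {
  static void configure(Configuration conf) {
    // URL_PROPERTY is normally populated via DBConfiguration.configureDB(...).
    conf.set(DBConfiguration.URL_PROPERTY, "jdbc:mysql://dbhost/sales");

    DBConfiguration dbConf = new DBConfiguration(conf);
    dbConf.setOutputTableName("upsert_orders");   // procedure name for the export
    dbConf.setOutputFieldNames("id", "total");    // or dbConf.setOutputFieldCount(2)
  }
}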