Package com.cloudera.sqoop.util

Examples of com.cloudera.sqoop.util.ExportException


  /** Run an export job to dump a table from HDFS to a database. */
  public void runExport() throws ExportException, IOException {
    ConnManager cmgr = context.getConnManager();
    SqoopOptions options = context.getOptions();
    Configuration conf = options.getConf();

    String outputTableName = context.getTableName();
    String stagingTableName = options.getStagingTableName();

    String tableName = outputTableName;
    boolean stagingEnabled = false;

    if (stagingTableName != null) { // user requested staging via --staging-table
      if (cmgr.supportsStagingForExport()) {
        LOG.info("Data will be staged in the table: " + stagingTableName);
        tableName = stagingTableName;
        stagingEnabled = true;
      } else {
        throw new ExportException("The active connection manager ("
            + cmgr.getClass().getCanonicalName()
            + ") does not support staging of data for export. "
            + "Please retry without specifying the --staging-table option.");
      }
    }

    String tableClassName =
        new TableClassName(options).getClassForTable(outputTableName);
    String ormJarFile = context.getJarFile();

    LOG.info("Beginning export of " + outputTableName);
    loadJars(conf, ormJarFile, tableClassName);

    if (stagingEnabled) {
      // Prepare the staging table
      if (options.doClearStagingTable()) {
        try {
          // Delete all records from staging table
          cmgr.deleteAllRecords(stagingTableName);
        } catch (SQLException ex) {
          throw new ExportException(
              "Failed to empty staging table before export run", ex);
        }
      } else {
        // User has not explicitly specified the clear staging table option.
        // Assert that the staging table is empty.
        try {
          long rowCount = cmgr.getTableRowCount(stagingTableName);
          if (rowCount != 0L) {
            throw new ExportException("The specified staging table ("
                + stagingTableName + ") is not empty. To force deletion of "
                + "its data, please retry with --clear-staging-table option.");
          }
        } catch (SQLException ex) {
          throw new ExportException(
              "Failed to count data rows in staging table: "
                  + stagingTableName, ex);
        }
      }
    }

    Job job = new Job(conf);
    try {
      // Set the external jar to use for the job.
      job.getConfiguration().set("mapred.jar", ormJarFile);
      if (options.getMapreduceJobName() != null) {
        job.setJobName(options.getMapreduceJobName());
      }

      propagateOptionsToJob(job);
      configureInputFormat(job, tableName, tableClassName, null);
      configureOutputFormat(job, tableName, tableClassName);
      configureMapper(job, tableName, tableClassName);
      configureNumTasks(job);
      cacheJars(job, context.getConnManager());

      jobSetup(job);
      setJob(job);
      boolean success = runJob(job);
      if (!success) {
        throw new ExportException("Export job failed!");
      }

      if (options.isValidationEnabled()) {
        validateExport(tableName, conf, job);
      }
    } catch (InterruptedException ie) {
      throw new IOException(ie);
    } catch (ClassNotFoundException cnfe) {
      throw new IOException(cnfe);
    } finally {
      unloadJars();
      jobTeardown(job);
    }

    // Unstage the data if needed
    if (stagingEnabled) {
      // Migrate data from staging table to the output table
      try {
        LOG.info("Starting to migrate data from staging table to destination.");
        cmgr.migrateData(stagingTableName, outputTableName);
      } catch (SQLException ex) {
        LOG.error("Failed to move data from staging table ("
            + stagingTableName + ") to target table ("
            + outputTableName + ")", ex);
        throw new ExportException(
            "Failed to move data from staging table", ex);
      }
    }
  }
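The runExport() flow above raises ExportException at four points: an unsupported --staging-table option, a non-empty staging table, a failed MapReduce job, and a failed migration from the staging table to the target. A minimal caller sketch, assuming the ExportJobContext(table, jarFile, options) constructor and SqoopOptions setters (setStagingTableName, setClearStagingTable) named after the getters used above:

import java.io.IOException;

import com.cloudera.sqoop.SqoopOptions;
import com.cloudera.sqoop.manager.ConnManager;
import com.cloudera.sqoop.manager.ExportJobContext;
import com.cloudera.sqoop.util.ExportException;

public class StagedExportDriver {
  // Run a staged export and translate failures into an exit code.
  public static int runStagedExport(ConnManager manager, SqoopOptions options,
      String table, String ormJarFile) {
    options.setStagingTableName(table + "_stage"); // assumed setter name
    options.setClearStagingTable(true);            // assumed setter name
    ExportJobContext context = new ExportJobContext(table, ormJarFile, options);
    try {
      manager.exportTable(context);
      return 0;
    } catch (ExportException ee) {
      // Unsupported staging, non-empty staging table, failed job,
      // or failed staging-to-target migration all land here.
      System.err.println("Export failed: " + ee.getMessage());
      return 1;
    } catch (IOException ioe) {
      System.err.println("I/O error during export: " + ioe.getMessage());
      return 1;
    }
  }
}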

  /** Verify the export by comparing Hadoop-side and DB-side row counts. */
  protected void validateExport(String tableName, Configuration conf, Job job)
      throws ExportException {
    try {
      ValidationContext validationContext = new ValidationContext(
          getRowCountFromHadoop(job),
        getRowCountFromDB(context.getConnManager(), tableName));

      doValidate(options, conf, validationContext);
    } catch (ValidationException e) {
      throw new ExportException("Error validating row counts", e);
    } catch (SQLException e) {
      throw new ExportException("Error retrieving DB target row count", e);
    } catch (IOException e) {
      throw new ExportException("Error retrieving source row count", e);
    } catch (InterruptedException e) {
      throw new ExportException("Error retrieving source row count", e);
    }
  }
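validateExport() above wraps the Hadoop-side and database-side row counts in a ValidationContext and rewraps any ValidationException as an ExportException. A minimal sketch of a validator that could back that hook, assuming the org.apache.sqoop.validation.Validator interface and the ValidationContext row-count getters:

import org.apache.sqoop.validation.ValidationContext;
import org.apache.sqoop.validation.ValidationException;
import org.apache.sqoop.validation.Validator;

public class ExactRowCountValidator implements Validator {
  // Fail unless the exported row count matches the target table exactly.
  @Override
  public void validate(ValidationContext context) throws ValidationException {
    long source = context.getSourceRowCount();
    long target = context.getTargetRowCount();
    if (source != target) {
      throw new ValidationException("Row counts differ: source=" + source
          + ", target=" + target);
    }
  }
}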

  /**
   * Export data stored in HDFS into a table in a database.
   * This inserts new rows into the target table.
   */
  public void exportTable(com.cloudera.sqoop.manager.ExportJobContext context)
      throws IOException, ExportException {
    throw new ExportException("This database does not support exports");
  }
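A connection manager that does support exports overrides this method and delegates to an export job instead of throwing. A sketch in the style of the JDBC-based managers, assuming com.cloudera.sqoop.mapreduce.JdbcExportJob and a hypothetical BaseSqlManager parent that extends ConnManager:

import java.io.IOException;

import com.cloudera.sqoop.manager.ExportJobContext;
import com.cloudera.sqoop.mapreduce.JdbcExportJob;
import com.cloudera.sqoop.util.ExportException;

public class MyJdbcManager extends BaseSqlManager { // hypothetical parent class
  @Override
  public void exportTable(ExportJobContext context)
      throws IOException, ExportException {
    // Register this manager so the job can call back into it, then run the
    // generic JDBC export; runExport() throws ExportException on failure.
    context.setConnManager(this);
    JdbcExportJob exportJob = new JdbcExportJob(context);
    exportJob.runExport();
  }
}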

  /**
   * Export data stored in HDFS into a table in a database. This calls a stored
   * procedure to insert rows into the target table.
   */
  public void callTable(com.cloudera.sqoop.manager.ExportJobContext context)
      throws IOException, ExportException {
    throw new ExportException("This database does not support exports "
        + "using stored procedures");
  }
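Stored-procedure export is selected by supplying a procedure name instead of a table (the --call option). A hedged sketch, assuming a SqoopOptions.setCall setter paired with the existing getCall getter:

import com.cloudera.sqoop.SqoopOptions;

public class StoredProcedureExportConfig {
  // Route the export through callTable() by naming a stored procedure.
  public static SqoopOptions forProcedure(SqoopOptions options, String proc) {
    options.setCall(proc); // corresponds to --call <procedure> (assumed setter)
    return options;
  }
}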

  /**
   * This updates existing rows in the target table, based on the
   * updateKeyCol specified in the context's SqoopOptions.
   */
  public void updateTable(com.cloudera.sqoop.manager.ExportJobContext context)
      throws IOException, ExportException {
    throw new ExportException("This database does not support updates");
  }

  /**
   * This may update or insert rows into the target table depending on
   * whether rows already exist in the target table or not.
   */
  public void upsertTable(com.cloudera.sqoop.manager.ExportJobContext context)
      throws IOException, ExportException {
    throw new ExportException("Mixed update/insert is not supported"
        + " against the target database yet");
  }
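Which of exportTable(), updateTable(), or upsertTable() Sqoop invokes is driven by the update options: an update key selects update mode, and allowing inserts selects upsert. A sketch, assuming the SqoopOptions.setUpdateKeyCol setter and the SqoopOptions.UpdateMode enum with an AllowInsert value:

import com.cloudera.sqoop.SqoopOptions;

public class UpdateModeConfig {
  // Update existing rows by key and insert rows that do not exist yet.
  public static SqoopOptions forUpsert(SqoopOptions options, String keyColumn) {
    options.setUpdateKeyCol(keyColumn); // corresponds to --update-key
    options.setUpdateMode(SqoopOptions.UpdateMode.AllowInsert); // --update-mode allowinsert
    return options;
  }
}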

  public void exportTable(com.cloudera.sqoop.manager.ExportJobContext context)
      throws IOException, ExportException {
    // Direct-mode MySQL export: run the mysqlimport-based export job.
    context.setConnManager(this);
    MySQLExportJob exportJob = new MySQLExportJob(context);
    exportJob.runExport();
  }

  public void upsertTable(com.cloudera.sqoop.manager.ExportJobContext context)
      throws IOException, ExportException {
    throw new ExportException("MySQL direct connector does not support upsert"
      + " mode. Please use JDBC based connector (remove --direct parameter)");
  }

    char qc = (char) options.getInputEnclosedBy();
    char ec = (char) options.getInputEscapedBy();
    checkNullValueStrings(options.getInNullStringValue(),
        options.getInNullNonStringValue());

    if (qc > 0 && !(qc == '"' || qc == '\'')) {
      throw new ExportException("Input enclosed-by character must be '\"' "
         + "or ''' for netezza direct mode exports");
    }
    if (ec > 0 && ec != '\\') {
      throw new ExportException("Input escaped-by character must be '\\' "
          + "for netezza direct mode exports");
    }
    exporter = new NetezzaExternalTableExportJob(context);
    exporter.runExport();
  }
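Only a narrow set of delimiter choices survives the two checks above. A minimal sketch of values that pass, assuming SqoopOptions setters matching the getInputEnclosedBy/getInputEscapedBy getters used in the snippet:

import com.cloudera.sqoop.SqoopOptions;

public class NetezzaDirectOptions {
  // Delimiters accepted by the Netezza direct-mode checks above.
  public static SqoopOptions withSafeDelimiters(SqoopOptions options) {
    options.setInputEnclosedBy('"'); // must be '"' or '\'' (assumed setter)
    options.setInputEscapedBy('\\'); // must be '\\' (assumed setter)
    return options;
  }
}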

  public void updateTable(com.cloudera.sqoop.manager.ExportJobContext context)
      throws IOException, ExportException {
    if (options.getNumMappers() > 1) {
      String msg = "Netezza update with multiple mappers can lead to "
          + "inconsistencies - Please set num-mappers option to 1 in the SQOOP "
          + "command line for update jobs with Netezza and SQOOP";
      throw new ExportException(msg);
    }

    if (!options.isBatchMode()) {
      if (!NetezzaManager.batchModeWarningPrinted) {
        LOG.warn("It looks like you are exporting to Netezza in non-batch ");
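Since Netezza update jobs reject more than one mapper outright, a caller must force a single mapper before submitting. A small sketch, assuming a SqoopOptions.setNumMappers setter matching the getNumMappers getter used above:

import com.cloudera.sqoop.SqoopOptions;

public class NetezzaUpdateConfig {
  // Avoid the multi-mapper ExportException: Netezza updates need one mapper.
  public static SqoopOptions forUpdateExport(SqoopOptions options) {
    options.setNumMappers(1); // equivalent to --num-mappers 1
    return options;
  }
}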
