Package com.cloudera.sqoop.util

Examples of com.cloudera.sqoop.util.ExportException
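ExportException is the checked exception Sqoop's export path throws when an export cannot proceed: a failed export job, an operation the active connection manager does not support, or a staging-table problem. The snippets below show both of its constructors in use: message-only and message-plus-cause. As a minimal sketch of the throw/catch pattern (the helper method and variable names here are illustrative, not from Sqoop):

import java.sql.SQLException;

import com.cloudera.sqoop.util.ExportException;

public class ExportExceptionSketch {

  // Illustrative helper; mirrors how the snippets below wrap SQL failures.
  static void clearStaging(String stagingTable) throws ExportException {
    try {
      // Stand-in for a real database call.
      throw new SQLException("connection refused");
    } catch (SQLException ex) {
      // Message-plus-cause constructor, as used in the staging code below.
      throw new ExportException(
          "Failed to empty staging table before export run", ex);
    }
  }

  public static void main(String[] args) {
    try {
      clearStaging("staging_tbl");
    } catch (ExportException ee) {
      System.err.println("Export aborted: " + ee.getMessage());
      if (ee.getCause() != null) {
        System.err.println("Caused by: " + ee.getCause());
      }
    }
  }
}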


      // Tail end of the export job submission: run the configured job and
      // convert failures into exceptions for the caller.
      cacheJars(job, context.getConnManager());
      setJob(job);

      boolean success = runJob(job);
      if (!success) {
        throw new ExportException("Export job failed!");
      }
    } catch (InterruptedException ie) {
      throw new IOException(ie);
    } catch (ClassNotFoundException cnfe) {
      throw new IOException(cnfe);
    } finally {
      unloadJars();
    }


  /**
   * Export data stored in HDFS into a table in a database.
   * This inserts new rows into the target table.
   */
  public void exportTable(com.cloudera.sqoop.manager.ExportJobContext context)
      throws IOException, ExportException {
    throw new ExportException("This database does not support exports");
  }
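For context, here is a hedged sketch of the caller side: building an ExportJobContext and invoking exportTable on a ConnManager, handling the declared ExportException. The table name, jar path, and the three-argument (table, jarFile, options) context constructor are assumptions made for illustration; Sqoop's own tools wire this up internally.

import java.io.IOException;

import com.cloudera.sqoop.SqoopOptions;
import com.cloudera.sqoop.manager.ConnManager;
import com.cloudera.sqoop.manager.ExportJobContext;
import com.cloudera.sqoop.util.ExportException;

public class ExportCallSketch {

  // 'manager' is assumed to be an already-configured ConnManager for the
  // target database; its construction is omitted here.
  static void runExport(ConnManager manager, SqoopOptions options)
      throws IOException {
    ExportJobContext context = new ExportJobContext(
        "employees",                   // target table (illustrative)
        "/tmp/codegen/employees.jar",  // generated ORM jar (illustrative)
        options);
    try {
      manager.exportTable(context);
    } catch (ExportException ee) {
      // The base ConnManager throws unconditionally; only managers that
      // actually support exports override exportTable().
      System.err.println("Export not supported or failed: "
          + ee.getMessage());
    }
  }
}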

  /**
   * Export data stored in HDFS into a table in a database. This calls a stored
   * procedure to insert rows into the target table.
   */
  public void callTable(com.cloudera.sqoop.manager.ExportJobContext context)
      throws IOException, ExportException {
    throw new ExportException("This database does not support exports "
        + "using stored procedures");
  }

  /**
   * Export updated data stored in HDFS into a database table.
   * This updates existing rows in the target table, based on the
   * updateKeyCol specified in the context's SqoopOptions.
   */
  public void updateTable(com.cloudera.sqoop.manager.ExportJobContext context)
      throws IOException, ExportException {
    throw new ExportException("This database does not support updates");
  }

  /**
   * This may update or insert rows into the target table depending on
   * whether rows already exist in the target table or not.
   */
  public void upsertTable(com.cloudera.sqoop.manager.ExportJobContext context)
      throws IOException, ExportException {
    throw new ExportException("Mixed update/insert is not supported"
        + " against the target database yet");
  }
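The four default implementations above form ConnManager's extension point: the base class rejects every export flavor with an ExportException, and only managers that can actually perform the operation override the corresponding method. A hypothetical subclass supporting plain INSERT exports might look like this (the class itself is illustrative; real managers such as SqlManager delegate to an export job implementation):

import java.io.IOException;

import com.cloudera.sqoop.manager.ConnManager;
import com.cloudera.sqoop.manager.ExportJobContext;
import com.cloudera.sqoop.util.ExportException;

// Hypothetical manager that supports INSERT-based exports only.
// Declared abstract because ConnManager has many other abstract methods.
public abstract class InsertOnlyManager extends ConnManager {

  @Override
  public void exportTable(ExportJobContext context)
      throws IOException, ExportException {
    // A real implementation would configure and run the export job here.
    boolean success = false; // placeholder for the real job result
    if (!success) {
      throw new ExportException("Export job failed!");
    }
  }

  // updateTable() and upsertTable() are deliberately not overridden, so
  // callers still get "This database does not support updates".
}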

      if (cmgr.supportsStagingForExport()) {
        LOG.info("Data will be staged in the table: " + stagingTableName);
        tableName = stagingTableName;
        stagingEnabled = true;
      } else {
        throw new ExportException("The active connection manager ("
            + cmgr.getClass().getCanonicalName()
            + ") does not support staging of data for export. "
            + "Please retry without specifying the --staging-table option.");
      }
    }

    String tableClassName =
        new TableClassName(options).getClassForTable(outputTableName);
    String ormJarFile = context.getJarFile();

    LOG.info("Beginning export of " + outputTableName);
    loadJars(conf, ormJarFile, tableClassName);

    if (stagingEnabled) {
      // Prepare the staging table
      if (options.doClearStagingTable()) {
        try {
          // Delete all records from staging table
          cmgr.deleteAllRecords(stagingTableName);
        } catch (SQLException ex) {
          throw new ExportException(
              "Failed to empty staging table before export run", ex);
        }
      } else {
        // User has not explicitly specified the clear staging table option.
        // Assert that the staging table is empty.
        try {
          long rowCount = cmgr.getTableRowCount(stagingTableName);
          if (rowCount != 0L) {
            throw new ExportException("The specified staging table ("
                + stagingTableName + ") is not empty. To force deletion of "
                + "its data, please retry with --clear-staging-table option.");
          }
        } catch (SQLException ex) {
          throw new ExportException(
              "Failed to count data rows in staging table: "
                  + stagingTableName, ex);
        }
      }
    }

    try {
      Job job = new Job(conf);

      // Set the external jar to use for the job.
      job.getConfiguration().set("mapred.jar", ormJarFile);

      propagateOptionsToJob(job);
      configureInputFormat(job, tableName, tableClassName, null);
      configureOutputFormat(job, tableName, tableClassName);
      configureMapper(job, tableName, tableClassName);
      configureNumTasks(job);
      cacheJars(job, context.getConnManager());
      setJob(job);
      boolean success = runJob(job);
      if (!success) {
        throw new ExportException("Export job failed!");
      }
    } catch (InterruptedException ie) {
      throw new IOException(ie);
    } catch (ClassNotFoundException cnfe) {
      throw new IOException(cnfe);
    } finally {
      unloadJars();
    }

    // Unstage the data if needed
    if (stagingEnabled) {
      // Migrate data from staging table to the output table
      try {
        LOG.info("Starting to migrate data from staging table to destination.");
        cmgr.migrateData(stagingTableName, outputTableName);
      } catch (SQLException ex) {
        LOG.error("Failed to move data from staging table ("
            + stagingTableName + ") to target table ("
            + outputTableName + ")", ex);
        throw new ExportException(
            "Failed to move data from staging table", ex);
      }
    }
  }
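The staging branch above reduces to two preconditions checked before the job runs: with --clear-staging-table the staging table is emptied, otherwise it must already be empty. A standalone sketch of that guard, using the same ConnManager calls as the snippet (the helper class itself is hypothetical):

import java.sql.SQLException;

import com.cloudera.sqoop.manager.ConnManager;
import com.cloudera.sqoop.util.ExportException;

final class StagingGuard {

  // Hypothetical helper mirroring the pre-export staging checks above.
  static void prepareStagingTable(ConnManager cmgr, String stagingTableName,
      boolean clearRequested) throws ExportException {
    if (clearRequested) {
      try {
        // --clear-staging-table: delete all records first.
        cmgr.deleteAllRecords(stagingTableName);
      } catch (SQLException ex) {
        throw new ExportException(
            "Failed to empty staging table before export run", ex);
      }
    } else {
      try {
        // No clear requested: the staging table must already be empty.
        if (cmgr.getTableRowCount(stagingTableName) != 0L) {
          throw new ExportException("The specified staging table ("
              + stagingTableName + ") is not empty.");
        }
      } catch (SQLException ex) {
        throw new ExportException(
            "Failed to count data rows in staging table: "
                + stagingTableName, ex);
      }
    }
  }

  private StagingGuard() { }
}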


      if (cmgr.supportsStagingForExport()) {
        LOG.info("Data will be staged in the table: " + stagingTableName);
        tableName = stagingTableName;
        stagingEnabled = true;
      } else {
        throw new ExportException("The active connection manager ("
            + cmgr.getClass().getCanonicalName()
            + ") does not support staging of data for export. "
            + "Please retry without specifying the --staging-table option.");
      }
    }


    String tableClassName = null;
    if (!cmgr.isORMFacilitySelfManaged()) {
      tableClassName =
          new TableClassName(options).getClassForTable(outputTableName);
    }
    // For self-managed ORM facilities, leave tableClassName null so that
    // we don't check for non-existent classes.
    String ormJarFile = context.getJarFile();

    LOG.info("Beginning export of " + outputTableName);
    loadJars(conf, ormJarFile, tableClassName);

    if (stagingEnabled) {
      // Prepare the staging table
      if (options.doClearStagingTable()) {
        try {
          // Delete all records from staging table
          cmgr.deleteAllRecords(stagingTableName);
        } catch (SQLException ex) {
          throw new ExportException(
              "Failed to empty staging table before export run", ex);
        }
      } else {
        // User has not explicitly specified the clear staging table option.
        // Assert that the staging table is empty.
        try {
          long rowCount = cmgr.getTableRowCount(stagingTableName);
          if (rowCount != 0L) {
            throw new ExportException("The specified staging table ("
                + stagingTableName + ") is not empty. To force deletion of "
                + "its data, please retry with --clear-staging-table option.");
          }
        } catch (SQLException ex) {
          throw new ExportException(
              "Failed to count data rows in staging table: "
                  + stagingTableName, ex);
        }
      }
    }

    Job job = new Job(conf);
    try {
      // Set the external jar to use for the job.
      job.getConfiguration().set("mapred.jar", ormJarFile);
      if (options.getMapreduceJobName() != null) {
        job.setJobName(options.getMapreduceJobName());
      }

      propagateOptionsToJob(job);
      if (isHCatJob) {
        LOG.info("Configuring HCatalog for export job");
        SqoopHCatUtilities hCatUtils = SqoopHCatUtilities.instance();
        hCatUtils.configureHCat(options, job, cmgr, tableName,
          job.getConfiguration());
      }
      configureInputFormat(job, tableName, tableClassName, null);
      configureOutputFormat(job, tableName, tableClassName);
      configureMapper(job, tableName, tableClassName);
      configureNumTasks(job);
      cacheJars(job, context.getConnManager());

      jobSetup(job);
      setJob(job);
      boolean success = runJob(job);
      if (!success) {
        throw new ExportException("Export job failed!");
      }

      if (options.isValidationEnabled()) {
        validateExport(tableName, conf, job);
      }
    } catch (InterruptedException ie) {
      throw new IOException(ie);
    } catch (ClassNotFoundException cnfe) {
      throw new IOException(cnfe);
    } finally {
      unloadJars();
      jobTeardown(job);
    }

    // Unstage the data if needed
    if (stagingEnabled) {
      // Migrate data from staging table to the output table
      try {
        LOG.info("Starting to migrate data from staging table to destination.");
        cmgr.migrateData(stagingTableName, outputTableName);
      } catch (SQLException ex) {
        LoggingUtils.logAll(LOG, "Failed to move data from staging table ("
          + stagingTableName + ") to target table ("
          + outputTableName + ")", ex);
        throw new ExportException(
            "Failed to move data from staging table", ex);
      }
    }
  }
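At the tool level, an ExportException typically terminates the run: the caller logs it and exits nonzero. A minimal, illustrative handler (the interface and exit convention here are assumptions, not Sqoop's actual ExportTool code):

import java.io.IOException;

import com.cloudera.sqoop.util.ExportException;

public class ExportRunnerSketch {

  // Hypothetical abstraction over any of the export entry points above.
  interface Export {
    void run() throws IOException, ExportException;
  }

  // Map both declared exception types to an exit status for the caller.
  static int runAndReport(Export export) {
    try {
      export.run();
      return 0;
    } catch (ExportException ee) {
      System.err.println("Error during export: " + ee.getMessage());
      return 1;
    } catch (IOException ioe) {
      System.err.println("I/O error during export: " + ioe.getMessage());
      return 1;
    }
  }
}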

