Package com.cloudera.sqoop.util

Examples of com.cloudera.sqoop.util.ImportException
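
The snippets below use all three constructor forms of ImportException: a
message alone, a cause alone (wrapping an AccumuloException), and a message
plus a cause. As a minimal, self-contained sketch of the idiom (assuming only
that the Sqoop jar is on the classpath; the class and method here are
hypothetical):

    import com.cloudera.sqoop.util.ImportException;

    public class ImportExceptionDemo {
      // Hypothetical import step showing how a lower-level failure is
      // wrapped in an ImportException with a descriptive message.
      static void runImportStep() throws ImportException {
        try {
          throw new IllegalStateException("simulated failure");
        } catch (RuntimeException e) {
          throw new ImportException("Import step failed", e);
        }
      }

      public static void main(String[] args) {
        try {
          runImportStep();
        } catch (ImportException e) {
          // Callers typically log the message and abort the import.
          System.err.println(e.getMessage() + ", caused by " + e.getCause());
        }
      }
    }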


    ImportJobBase importer;
    if (opts.getHBaseTable() != null) {
      // Import to HBase.
      if (!HBaseUtil.isHBaseJarPresent()) {
        throw new ImportException("HBase jars are not present in classpath,"
            + " cannot import to HBase!");
      }
      if (!opts.isBulkLoadEnabled()) {
        importer = new HBaseImportJob(opts, context);
      } else {
        importer = new HBaseBulkImportJob(opts, context);
      }
    } else if (opts.getAccumuloTable() != null) {
      // Import to Accumulo.
      if (!AccumuloUtil.isAccumuloJarPresent()) {
        throw new ImportException("Accumulo jars are not present in classpath,"
              + " cannot import to Accumulo!");
      }
      importer = new AccumuloImportJob(opts, context);
    } else {
      // Import to HDFS.
      importer = new DataDrivenImportJob(opts, context.getInputFormat(),
          context);
    }

    String splitCol = getSplitColumn(opts, null);
    if (splitCol == null) {
      String boundaryQuery = opts.getBoundaryQuery();
      if (opts.getNumMappers() > 1) {
        // Can't infer a primary key.
        throw new ImportException("A split-by column must be specified for "
            + "parallel free-form query imports. Please specify one with "
            + "--split-by or perform a sequential import with '-m 1'.");
      } else if (boundaryQuery != null && !boundaryQuery.isEmpty()) {
        // Query import with boundary query and no split column specified
        throw new ImportException("Using a boundary query for a query based "
            + "import requires specifying the split by column as well. Please "
            + "specify a column name using --split-by and try again.");
      }
    }
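
Once an importer has been selected and the split column validated, the import
is typically launched through the importer's runImport entry point. A hedged
fragment based on ImportJobBase.runImport in Sqoop (the jarFile variable is an
assumption here):

    // Drive the selected import job; a failed job surfaces to the caller
    // as an ImportException.
    importer.runImport(tableName, jarFile, splitCol, opts.getConf());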


    char qc = options.getOutputEnclosedBy();
    char ec = options.getOutputEscapedBy();

    if (qc > 0 && !(qc == '"' || qc == '\'')) {
      throw new ImportException("Output enclosed-by character must be '\"' "
          + "or ''' for Netezza direct mode imports");
    }
    if (ec > 0 && ec != '\\') {
      throw new ImportException("Output escaped-by character must be '\\' "
          + "for Netezza direct mode imports");
    }

    NetezzaExternalTableImportJob importer = null;
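
The guards above appear to rely on SqoopOptions returning the zero character
when no enclosed-by or escaped-by character was configured (hence the qc > 0
check), so only explicitly-set characters are validated. A standalone sketch
of the same validation logic (class and method names are hypothetical):

    public class NetezzaCharCheckDemo {
      // Mirrors the validation above: an unset character (0) passes, and
      // only '"' or '\'' (enclosed-by) and '\\' (escaped-by) are legal.
      static void check(char qc, char ec) {
        if (qc > 0 && !(qc == '"' || qc == '\'')) {
          throw new IllegalArgumentException("enclosed-by must be \" or '");
        }
        if (ec > 0 && ec != '\\') {
          throw new IllegalArgumentException("escaped-by must be \\");
        }
      }

      public static void main(String[] args) {
        check('"', '\\');   // legal combination
        check('\0', '\0');  // unset characters are accepted
        check('%', '\\');   // throws: '%' is not a legal enclosed-by char
      }
    }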

    Configuration conf = job.getConfiguration();
    String tableName = conf.get(HBasePutProcessor.TABLE_NAME_KEY);
    String familyName = conf.get(HBasePutProcessor.COL_FAMILY_KEY);

    if (null == tableName) {
      throw new ImportException(
          "Import to HBase error: Table name not specified");
    }

    if (null == familyName) {
      throw new ImportException(
          "Import to HBase error: Column family not specified");
    }

    // Add HBase configuration files to this conf object.
    HBaseConfiguration.addHbaseResources(conf);

    HBaseAdmin admin = new HBaseAdmin(conf);

    // Add an authentication token to the job if we're running on a secure
    // cluster.
    //
    // We currently support HBase 0.90, which does not include the security
    // patches and therefore lacks the required methods "isSecurityEnabled"
    // and "obtainAuthTokenForJob".
    //
    // We use the reflection API to check whether those methods are
    // available and call them only if they are present.
    //
    // Once support for HBase 0.90 is removed, this can be simplified to
    // the following fragment:
    /*
    try {
      if (User.isSecurityEnabled()) {
        User user = User.getCurrent();
        user.obtainAuthTokenForJob(conf, job);
      }
    } catch(InterruptedException ex) {
      throw new ImportException("Can't get authentication token", ex);
    }
    */
    try {
      // Get method isSecurityEnabled
      Method isHBaseSecurityEnabled = User.class.getMethod(
          "isHBaseSecurityEnabled", Configuration.class);

      // Get method obtainAuthTokenForJob
      Method obtainAuthTokenForJob = User.class.getMethod(
        "obtainAuthTokenForJob", Configuration.class, Job.class);

      // Get current user
      User user = User.getCurrent();

      // Obtain security token if needed
      if ((Boolean)isHBaseSecurityEnabled.invoke(null, conf)) {
        obtainAuthTokenForJob.invoke(user, conf, job);
      }
    } catch (NoSuchMethodException e) {
      LOG.info("It seems that we're running on HBase without security"
        + " additions. Security additions will not be used during this job.");
    } catch (InvocationTargetException e) {
      throw new ImportException("Can't get authentication token", e);
    } catch (IllegalAccessException e) {
      throw new ImportException("Can't get authentication token", e);
    }

    // Check to see if the table exists.
    HTableDescriptor tableDesc = null;
    byte [] familyBytes = Bytes.toBytes(familyName);
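
The reflection idiom above (probe for a method with getMethod, use it if the
lookup succeeds, degrade gracefully on NoSuchMethodException) can be shown
with nothing but the JDK. A minimal runnable sketch using String.isBlank,
which exists only on Java 11 and later:

    import java.lang.reflect.InvocationTargetException;
    import java.lang.reflect.Method;

    public class OptionalMethodDemo {
      public static void main(String[] args) {
        try {
          // Probe for a method that may or may not exist on this runtime.
          Method isBlank = String.class.getMethod("isBlank");
          // Call it only because the lookup succeeded.
          System.out.println("\"\".isBlank() = " + isBlank.invoke(""));
        } catch (NoSuchMethodException e) {
          // Older runtime: skip the feature, as the HBase snippet does.
          System.out.println("isBlank not available; skipping.");
        } catch (IllegalAccessException | InvocationTargetException e) {
          throw new RuntimeException("Reflective call failed", e);
        }
      }
    }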

    String zookeepers = conf.get(AccumuloConstants.ZOOKEEPERS);
    String instance = conf.get(AccumuloConstants.ACCUMULO_INSTANCE);
    String user = conf.get(AccumuloConstants.ACCUMULO_USER_NAME);

    if (null == tableName) {
      throw new ImportException(
          "Import to Accumulo error: Table name not specified");
    }

    if (null == familyName) {
      throw new ImportException(
          "Import to Accumulo error: Column family not specified");
    }

    try {
      // Set up the libjars
      AccumuloUtil.addJars(job, opts);

      Instance inst = new ZooKeeperInstance(instance, zookeepers);
      String password = conf.get(AccumuloConstants.ACCUMULO_PASSWORD);
      Connector conn = inst.getConnector(user, new PasswordToken(password));
      if (!conn.tableOperations().exists(tableName)) {
        if (options.getCreateAccumuloTable()) {
          LOG.info("Table " + tableName + " doesn't exist, creating.");
          try {
            conn.tableOperations().create(tableName);
          } catch (TableExistsException e) {
            // Should only happen if the table was created
            // by another process between the existence check
            // and the create command
            LOG.info("Table " + tableName + " created by another process.");
          }
        } else {
          throw new ImportException(
              "Table "
                  + tableName
                  + " does not exist, and --accumulo-create-table "
                  + "not specified.");
        }
      }
    } catch (AccumuloException e) {
      throw new ImportException(e);
    } catch (AccumuloSecurityException e) {
      throw new ImportException(e);
    }
    super.jobSetup(job);
  }
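
The TableExistsException handling above tolerates a classic check-then-act
race: another process may create the table between the existence check and
the create call. The same pattern as a self-contained JDK analogy on the
filesystem (the directory name is hypothetical):

    import java.io.IOException;
    import java.nio.file.FileAlreadyExistsException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    public class CreateIfAbsentDemo {
      public static void main(String[] args) throws IOException {
        Path dir = Paths.get("demo-table");
        if (!Files.exists(dir)) {
          try {
            Files.createDirectory(dir);
          } catch (FileAlreadyExistsException e) {
            // Another process won the race between the existence check and
            // the create call; treat it as success, just as the Accumulo
            // snippet does with TableExistsException.
            System.out.println("Created by another process; continuing.");
          }
        }
      }
    }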

        if (isDateTimeColumn(checkColumnType)) {
          nextIncrementalValue = (nextVal == null) ? null
            : manager.datetimeToQueryString(nextVal.toString(),
                                            checkColumnType);
        } else if (manager.isCharColumn(checkColumnType)) {
          throw new ImportException("Character column "
            + "(" + options.getIncrementalTestColumn() + ") can not be used "
            + "to determine which rows to incrementally import.");
        } else {
          nextIncrementalValue = (nextVal == null) ? null : nextVal.toString();
        }
      } catch (SQLException sqlE) {
        throw new IOException(sqlE);
      }
      break;
    case DateLastModified:
      if (options.getMergeKeyCol() == null && !options.isAppendMode()
          && fs.exists(getOutputPath(options, context.getTableName(), false))) {
        throw new ImportException("--" + MERGE_KEY_ARG + " or " + "--" + APPEND_ARG
          + " is required when using --" + this.INCREMENT_TYPE_ARG
          + " lastmodified and the output directory exists.");
      }
      checkColumnType = manager.getColumnTypes(options.getTableName(),
        options.getSqlQuery()).get(options.getIncrementalTestColumn());
      nextVal = manager.getCurrentDbTimestamp();
      if (null == nextVal) {
        throw new IOException("Could not get current time from database");
      }
      nextIncrementalValue = manager.datetimeToQueryString(nextVal.toString(),
          checkColumnType);
      break;
    default:
      throw new ImportException("Undefined incremental import type: "
          + incrementalMode);
    }

    // Build the WHERE clause components that are used to import
    // only this incremental section.
    StringBuilder sb = new StringBuilder();
    String prevEndpoint = options.getIncrementalLastValue();

    if (isDateTimeColumn(checkColumnType) && null != prevEndpoint
        && !prevEndpoint.startsWith("\'") && !prevEndpoint.endsWith("\'")) {
      // Date/time bounds in incremental imports should be single-quoted
      // in ANSI SQL. If the user didn't supply the quotes, add them here.
      prevEndpoint = manager.datetimeToQueryString(prevEndpoint,
          checkColumnType);
    }

    String checkColName = manager.escapeColName(
        options.getIncrementalTestColumn());
    LOG.info("Incremental import based on column " + checkColName);
    if (null != prevEndpoint) {
      if (prevEndpoint.equals(nextIncrementalValue)) {
        LOG.info("No new rows detected since last import.");
        return false;
      }
      LOG.info("Lower bound value: " + prevEndpoint);
      sb.append(checkColName);
      switch (incrementalMode) {
      case AppendRows:
        sb.append(" > ");
        break;
      case DateLastModified:
        sb.append(" >= ");
        break;
      default:
        throw new ImportException("Undefined comparison");
      }
      sb.append(prevEndpoint);
      sb.append(" AND ");
    }

    if (null != nextIncrementalValue) {
      sb.append(checkColName);
      switch (incrementalMode) {
      case AppendRows:
        sb.append(" <= ");
        break;
      case DateLastModified:
        sb.append(" < ");
        break;
      default:
        throw new ImportException("Undefined comparison");
      }
      sb.append(nextIncrementalValue);
    } else {
      sb.append(checkColName);
      sb.append(" IS NULL ");
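
For append-mode imports, the WHERE fragment assembled above describes a
half-open interval over the check column: strictly greater than the previous
endpoint and at most the new one. A standalone sketch of the resulting shape
(column name and bounds are hypothetical):

    public class IncrementalWhereDemo {
      public static void main(String[] args) {
        String checkColName = "id";          // hypothetical check column
        String prevEndpoint = "100";         // last imported value
        String nextIncrementalValue = "200"; // current upper bound

        StringBuilder sb = new StringBuilder();
        // Append mode: the lower bound is exclusive (">"), matching the
        // AppendRows case above; lastmodified mode uses ">=".
        sb.append(checkColName).append(" > ").append(prevEndpoint);
        sb.append(" AND ");
        // The upper bound is inclusive ("<=") for append mode; lastmodified
        // uses "<" so rows stamped at the boundary are caught next run.
        sb.append(checkColName).append(" <= ").append(nextIncrementalValue);

        System.out.println(sb); // prints: id > 100 AND id <= 200
      }
    }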

    super.jobSetup(job);

    // We shouldn't have gotten here if the bulk load directory is not set,
    // so throw an ImportException.
    if (getContext().getDestination() == null) {
      throw new ImportException("Can't run HBaseBulkImportJob without a " +
          "valid destination directory.");
    }

    TableMapReduceUtil.addDependencyJars(job.getConfiguration(), Preconditions.class);
    FileOutputFormat.setOutputPath(job, getContext().getDestination());

    } catch (Exception e) {
      String errorMessage = String.format("Unrecoverable error while " +
        "performing the bulk load of files in [%s]",
        bulkLoadDir.toString());
      throw new ImportException(errorMessage, e);
    }
  }

      jobSetup(job);
      setJob(job);
      boolean success = runJob(job);
      if (!success) {
        throw new ImportException("Import job failed!");
      }

      completeImport(job);

      if (options.isValidationEnabled()) {

        getRowCountFromDB(context.getConnManager(), tableName), // source
        getRowCountFromHadoop(job));                            // target

      doValidate(options, conf, validationContext);
    } catch (ValidationException e) {
      throw new ImportException("Error validating row counts", e);
    } catch (SQLException e) {
      throw new ImportException("Error retrieving DB source row count", e);
    } catch (IOException e) {
      throw new ImportException("Error retrieving target row count", e);
    } catch (InterruptedException e) {
      throw new ImportException("Error retrieving target row count", e);
    }
  }
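
The IOException and InterruptedException handlers above are identical, so on
Java 7 and later they could be collapsed with multi-catch; a hedged fragment
of the equivalent form:

    } catch (IOException | InterruptedException e) {
      throw new ImportException("Error retrieving target row count", e);
    }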

    }

    @Override
    public void importTable(ImportJobContext context)
        throws ImportException {
      throw new ImportException("This manager cannot read tables.");
    }
