Package com.cloudera.sqoop

Examples of com.cloudera.sqoop.SqoopOptions.InvalidOptionsException
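
The excerpts that follow come from Sqoop's argument-parsing and validation code, where InvalidOptionsException is thrown whenever command-line options are missing, malformed, or mutually exclusive. As orientation, here is a minimal, self-contained sketch of the same throw/catch pattern. The ValidateExample class and its validate() helper are illustrative only (not part of Sqoop); the check mirrors the --connect validation excerpted further down.

import com.cloudera.sqoop.SqoopOptions;
import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;

public class ValidateExample {

  // Reject an options object that has no JDBC connect string, mirroring the
  // validateCommonOptions() excerpt below. (Illustrative helper, not Sqoop API.)
  static void validate(SqoopOptions options) throws InvalidOptionsException {
    if (options.getConnectString() == null) {
      throw new InvalidOptionsException(
          "Error: Required argument --connect is missing.");
    }
  }

  public static void main(String[] args) {
    SqoopOptions options = new SqoopOptions();   // no connect string set
    try {
      validate(options);
    } catch (InvalidOptionsException e) {
      // Sqoop tools typically print the message and abort the command.
      System.err.println(e.getMessage());
    }
  }
}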


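      // Map the --update-mode value onto UpdateMode; any other value is rejected.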
      if ("updateonly".equals(updateTypeStr)) {
        out.setUpdateMode(UpdateMode.UpdateOnly);
      } else if ("allowinsert".equals(updateTypeStr)) {
        out.setUpdateMode(UpdateMode.AllowInsert);
      } else {
        throw new InvalidOptionsException("Unknown new update mode: "
            + updateTypeStr + ". Use 'updateonly' or 'allowinsert'."
            + HELP_STR);
      }
    }
  }


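    // Apply the shared connection options: help, the connect string, an explicit connection manager class, and an optional connection parameter file.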
    if (in.hasOption(HELP_ARG)) {
      ToolOptions toolOpts = new ToolOptions();
      configureOptions(toolOpts);
      printHelp(toolOpts);
      throw new InvalidOptionsException("");
    }

    if (in.hasOption(CONNECT_STRING_ARG)) {
      out.setConnectString(in.getOptionValue(CONNECT_STRING_ARG));
    }

    if (in.hasOption(CONN_MANAGER_CLASS_NAME)) {
      out.setConnManagerClassName(in.getOptionValue(CONN_MANAGER_CLASS_NAME));
    }

    if (in.hasOption(CONNECT_PARAM_FILE)) {
      File paramFile = new File(in.getOptionValue(CONNECT_PARAM_FILE));
      if (!paramFile.exists()) {
        throw new InvalidOptionsException(
                "Specified connection parameter file not found: " + paramFile);
      }
      InputStream inStream = null;
      Properties connectionParams = new Properties();
      try {
        inStream = new FileInputStream(
                      new File(in.getOptionValue(CONNECT_PARAM_FILE)));
        connectionParams.load(inStream);
      } catch (IOException ex) {
        LOG.warn("Failed to load connection parameter file", ex);
        throw new InvalidOptionsException(
                "Error while loading connection parameter file: "
                + ex.getMessage());
      } finally {
        if (inStream != null) {
          try {
            inStream.close();
          } catch (IOException ex) {
            LOG.warn("Failed to close the input stream", ex);
          }
        }
      }
      // Pass the loaded properties through to the connection.
      out.setConnectionParams(connectionParams);
    }

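      // Resolve the password: prompt on the console, or read it from a password file; a password file cannot be combined with an inline password or a console prompt.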
      out.setPasswordFromConsole();
    }

    if (in.hasOption(PASSWORD_PATH_ARG)) {
      if (in.hasOption(PASSWORD_ARG) || in.hasOption(PASSWORD_PROMPT_ARG)) {
        throw new InvalidOptionsException("Either password or path to a "
          + "password file must be specified but not both.");
      }

      try {
        out.setPasswordFilePath(in.getOptionValue(PASSWORD_PATH_ARG));
        // apply password from file into password in options
        out.setPassword(CredentialsUtil.fetchPasswordFromFile(out));
      } catch (IOException ex) {
        LOG.warn("Failed to fetch password from the password file", ex);
        throw new InvalidOptionsException(
          "Error while reading the password file: "
            + ex.getMessage());
      }
    }
  }

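  // Resolve a class by name using the thread context class loader; ClassNotFoundException is reported as InvalidOptionsException.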
    throws InvalidOptionsException {
    try {
      return Class.forName(className, true,
        Thread.currentThread().getContextClassLoader());
    } catch (ClassNotFoundException e) {
      throw new InvalidOptionsException(e.getMessage());
    }
  }

  }

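  // Every tool requires --connect; its absence is reported as an invalid-options error.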
  protected void validateCommonOptions(SqoopOptions options)
      throws InvalidOptionsException {
    if (options.getConnectString() == null) {
      throw new InvalidOptionsException(
          "Error: Required argument --connect is missing."
          + HELP_STR);
    }
  }

  }

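  // --class-name and --package-name are mutually exclusive for generated code.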
  protected void validateCodeGenOptions(SqoopOptions options)
      throws InvalidOptionsException {
    if (options.getClassName() != null && options.getPackageName() != null) {
      throw new InvalidOptionsException(
          "--class-name overrides --package-name. You cannot use both."
          + HELP_STR);
    }
  }

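  // Hive import checks: the delimiter drop and delimiter replacement options conflict, --hcatalog-table conflicts with --hive-import, and Avro or SequenceFile file layouts are not supported with a Hive import.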
  protected void validateHiveOptions(SqoopOptions options)
      throws InvalidOptionsException {
    if (options.getHiveDelimsReplacement() != null
            && options.doHiveDropDelims()) {
      throw new InvalidOptionsException("The " + HIVE_DROP_DELIMS_ARG
              + " option conflicts with the " + HIVE_DELIMS_REPLACEMENT_ARG
              + " option." + HELP_STR);
    }

    // Make sure that only one of the HCatalog and Hive import options is used
    String hCatTable = options.getHCatTableName();
    if (hCatTable != null && options.doHiveImport()) {
      throw new InvalidOptionsException("The " + HCATALOG_TABLE_ARG
        + " option conflicts with the " + HIVE_IMPORT_ARG
        + " option." + HELP_STR);
    }

    if(options.doHiveImport()
        && options.getFileLayout() == SqoopOptions.FileLayout.AvroDataFile) {
      throw new InvalidOptionsException("Hive import is not compatible with "
        + "importing into AVRO format.");
    }

    if(options.doHiveImport()
        && options.getFileLayout() == SqoopOptions.FileLayout.SequenceFile) {
      throw new InvalidOptionsException("Hive import is not compatible with "
        + "importing into SequenceFile format.");
    }

    // Many users report issues when trying to import data directly into the
    // Hive warehouse directory; this should prevent users from doing so when
    // the default warehouse location is in use.

        + " in HCatalog jobs for most output types; only text-based formats"
        + " use them. It is better to use --hive-import in those cases.");
    }
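    // HCatalog checks: --hcatalog-table conflicts with Hive import, the target and warehouse directories, the export directory, direct mode, append mode, and the Avro/SequenceFile layouts.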
    if (options.doHiveImport()) {
      throw new InvalidOptionsException("The " + HCATALOG_TABLE_ARG
        + " option conflicts with the " + HIVE_IMPORT_ARG
        + " option." + HELP_STR);
    }
    if (options.getTargetDir() != null) {
      throw new InvalidOptionsException("The " + TARGET_DIR_ARG
        + " option conflicts with the " + HCATALOG_TABLE_ARG
        + " option." + HELP_STR);
    }
    if (options.getWarehouseDir() != null) {
      throw new InvalidOptionsException("The " + WAREHOUSE_DIR_ARG
        + " option conflicts with the " + HCATALOG_TABLE_ARG
        + " option." + HELP_STR);
    }
    if (options.isDirect()) {
      throw new InvalidOptionsException("Direct import is incompatible with "
        + "HCatalog. Please remove the parameter --direct");
    }
    if (options.isAppendMode()) {
      throw new InvalidOptionsException("Append mode for imports is not "
        + "compatible with HCatalog. Please remove the parameter "
        + "--append");
    }
    if (options.getExportDir() != null) {
      throw new InvalidOptionsException("The " + EXPORT_PATH_ARG
        + " option conflicts with the " + HCATALOG_TABLE_ARG
        + " option." + HELP_STR);
    }

    if (options.getFileLayout() == SqoopOptions.FileLayout.AvroDataFile) {
      throw new InvalidOptionsException("HCatalog jobs are not compatible with "
        + "the Avro format option " + FMT_AVRODATAFILE_ARG + "."
        + HELP_STR);
    }

    if (options.getFileLayout() == SqoopOptions.FileLayout.SequenceFile) {
      throw new InvalidOptionsException("HCatalog jobs are not compatible with "
        + "the SequenceFile format option " + FMT_SEQUENCEFILE_ARG + "."
        + HELP_STR);
    }
  }

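  // HBase import checks: --hbase-table and --column-family must be set together, and direct mode cannot target HBase.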
  protected void validateHBaseOptions(SqoopOptions options)
      throws InvalidOptionsException {
    if ((options.getHBaseColFamily() != null && options.getHBaseTable() == null)
        || (options.getHBaseColFamily() == null
        && options.getHBaseTable() != null)) {
      throw new InvalidOptionsException(
          "Both --hbase-table and --column-family must be set together."
          + HELP_STR);
    }
    if (options.getHBaseTable() != null && options.isDirect()) {
      throw new InvalidOptionsException("Direct import is incompatible with "
        + "HBase. Please remove parameter --direct");
    }
  }

    // If extraArguments is full, check for '--' followed by args for
    // mysqldump or other commands we rely on.
    options.setExtraArgs(getSubcommandArgs(extraArguments));
    int dashPos = getDashPosition(extraArguments);

    if (hasUnrecognizedArgs(extraArguments, 0, dashPos)) {
      throw new InvalidOptionsException(HELP_STR);
    }

    validateImportOptions(options);
    validateIncrementalOptions(options);
    validateCommonOptions(options);

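For completeness, a hedged sketch of how the checks above can be reused from a custom tool. The AuditedImportTool class and its mapper-count rule are hypothetical; the sketch assumes a Sqoop 1.x ImportTool whose validateOptions() takes a com.cloudera.sqoop.SqoopOptions and runs the validators excerpted above.

import com.cloudera.sqoop.SqoopOptions;
import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
import com.cloudera.sqoop.tool.ImportTool;

public class AuditedImportTool extends ImportTool {

  @Override
  public void validateOptions(SqoopOptions options)
      throws InvalidOptionsException {
    // Run the stock import/incremental/common validations first.
    super.validateOptions(options);

    // Hypothetical site-specific rule: cap the number of map tasks.
    if (options.getNumMappers() > 32) {
      throw new InvalidOptionsException(
          "Error: more than 32 map tasks are not allowed on this cluster.");
    }
  }
}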