Examples of OutputJobInfo
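
OutputJobInfo describes the target of an HCatalog write: the database, the table, and optional static partition values. The excerpts below, drawn from HCatalog and its HBase storage handler code, show it being created on the driver side, serialized into the job configuration under HCatConstants.HCAT_KEY_OUTPUT_INFO, and deserialized again inside mappers and output formats. As an orientation before the excerpts, here is a minimal, self-contained driver-side sketch; the job, database, and table names are hypothetical, and it assumes the pre-Hive-merge org.apache.hcatalog API used throughout these examples.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hcatalog.data.DefaultHCatRecord;
import org.apache.hcatalog.data.schema.HCatSchema;
import org.apache.hcatalog.mapreduce.HCatOutputFormat;
import org.apache.hcatalog.mapreduce.OutputJobInfo;

public class HCatWriteSetup {

    public static Job createWriteJob(Configuration conf) throws IOException {
        Job job = new Job(conf, "hcat-write-example");

        // Describe the target: database "default", table "example_table" (both hypothetical).
        // A null partition-value map, as in the excerpts below, means no static
        // partition values are supplied with the job.
        OutputJobInfo outputJobInfo = OutputJobInfo.create("default", "example_table", null);
        HCatOutputFormat.setOutput(job, outputJobInfo);

        // Write records using the table's own schema.
        HCatSchema schema = HCatOutputFormat.getTableSchema(job);
        HCatOutputFormat.setSchema(job, schema);

        job.setOutputFormatClass(HCatOutputFormat.class);
        job.setOutputKeyClass(WritableComparable.class);
        job.setOutputValueClass(DefaultHCatRecord.class);
        return job;
    }
}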


Examples of org.apache.hcatalog.mapreduce.OutputJobInfo

      os.close();
    }

    //create job
    Path workingDir = new Path(methodTestDir, "mr_work");
    OutputJobInfo outputJobInfo = OutputJobInfo.create(databaseName,
      tableName, null);
    Job job = configureJob(testName, conf, workingDir, MapHCatWrite.class,
      outputJobInfo, inputPath);
    assertTrue(job.waitForCompletion(true));

Examples of org.apache.hcatalog.mapreduce.OutputJobInfo

      os.write(Bytes.toBytes(data[i] + "\n"));
      os.close();
    }

    Path workingDir = new Path(methodTestDir, "mr_abort");
    OutputJobInfo outputJobInfo = OutputJobInfo.create(databaseName,
      tableName, null);
    Job job = configureJob(testName, conf, workingDir, MapWriteAbortTransaction.class,
      outputJobInfo, inputPath);
    assertFalse(job.waitForCompletion(true));

Examples of org.apache.hcatalog.mapreduce.OutputJobInfo

    partitionValues.put("colname", "p1");
    //null server url means local mode
    HCatTableInfo info = HCatTableInfo.getOutputTableInfo(null, null, dbName, tblName, partitionValues);

    HCatOutputFormat.setOutput(job, info);
    OutputJobInfo jobInfo = HCatOutputFormat.getJobInfo(job);

    assertNotNull(jobInfo.getTableInfo());
    assertEquals(1, jobInfo.getTableInfo().getPartitionValues().size());
    assertEquals("p1", jobInfo.getTableInfo().getPartitionValues().get("colname"));
    assertEquals(1, jobInfo.getTableSchema().getFields().size());
    assertEquals("colname", jobInfo.getTableSchema().getFields().get(0).getName());

    StorerInfo storer = jobInfo.getStorerInfo();
    assertEquals(RCFileOutputDriver.class.getName(), storer.getOutputSDClass());

    publishTest(job);
  }

Examples of org.apache.hcatalog.mapreduce.OutputJobInfo

  }

  private OutputFormat<WritableComparable<?>, Object> getOutputFormat(JobConf job)
    throws IOException {
    String outputInfo = job.get(HCatConstants.HCAT_KEY_OUTPUT_INFO);
    OutputJobInfo outputJobInfo = (OutputJobInfo) HCatUtil.deserialize(outputInfo);
    OutputFormat<WritableComparable<?>, Object> outputFormat = null;
    if (HBaseHCatStorageHandler.isBulkMode(outputJobInfo)) {
      outputFormat = new HBaseBulkOutputFormat();
    } else {
      outputFormat = new HBaseDirectOutputFormat();
    }
    return outputFormat;
  }

Examples of org.apache.hcatalog.mapreduce.OutputJobInfo

    }

    private OutputFormat<WritableComparable<?>, Put> getOutputFormat(JobConf job)
        throws IOException {
        String outputInfo = job.get(HCatConstants.HCAT_KEY_OUTPUT_INFO);
        OutputJobInfo outputJobInfo = (OutputJobInfo) HCatUtil.deserialize(outputInfo);
        OutputFormat<WritableComparable<?>, Put> outputFormat = null;
        if (HBaseHCatStorageHandler.isBulkMode(outputJobInfo)) {
            outputFormat = new HBaseBulkOutputFormat();
        } else {
            outputFormat = new HBaseDirectOutputFormat();
        }
        return outputFormat;
    }

Examples of org.apache.hcatalog.mapreduce.OutputJobInfo

        job.set(HBaseConstants.PROPERTY_OUTPUT_TABLE_NAME_KEY, tableName);

        //manually create transaction
        RevisionManager rm = HBaseRevisionManagerUtil.getOpenedRevisionManager(conf);
        try {
            OutputJobInfo outputJobInfo = OutputJobInfo.create("default", tableName, null);
            Transaction txn = rm.beginWriteTransaction(tableName, Arrays.asList(familyName));
            outputJobInfo.getProperties().setProperty(HBaseConstants.PROPERTY_WRITE_TXN_KEY,
                HCatUtil.serialize(txn));
            job.set(HCatConstants.HCAT_KEY_OUTPUT_INFO,
                HCatUtil.serialize(outputJobInfo));
        } finally {
            rm.close();
        }

Examples of org.apache.hcatalog.mapreduce.OutputJobInfo

    public static class MapHCatWrite extends Mapper<LongWritable, Text, BytesWritable, HCatRecord> {

        @Override
        public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            OutputJobInfo jobInfo = (OutputJobInfo) HCatUtil.deserialize(context.getConfiguration().get(HCatConstants.HCAT_KEY_OUTPUT_INFO));
            HCatRecord record = new DefaultHCatRecord(3);
            HCatSchema schema = jobInfo.getOutputSchema();
            String vals[] = value.toString().split(",");
            record.setInteger("key", schema, Integer.parseInt(vals[0]));
            for (int i = 1; i < vals.length; i++) {
                String pair[] = vals[i].split(":");
                record.set(pair[0], schema, pair[1]);
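
Following the pattern of the MapHCatWrite excerpt above, here is a minimal, self-contained mapper sketch; the class name and the "key" field are hypothetical. It deserializes the OutputJobInfo that the driver stored in the configuration, builds an HCatRecord against the output schema, and emits it (HCatOutputFormat ignores the map output key, so null is used).

import java.io.IOException;

import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hcatalog.common.HCatConstants;
import org.apache.hcatalog.common.HCatUtil;
import org.apache.hcatalog.data.DefaultHCatRecord;
import org.apache.hcatalog.data.HCatRecord;
import org.apache.hcatalog.data.schema.HCatSchema;
import org.apache.hcatalog.mapreduce.OutputJobInfo;

public class ExampleHCatWriteMapper extends Mapper<LongWritable, Text, BytesWritable, HCatRecord> {

    @Override
    public void map(LongWritable key, Text value, Context context)
        throws IOException, InterruptedException {
        // Recover the OutputJobInfo the driver serialized into the job configuration.
        OutputJobInfo jobInfo = (OutputJobInfo) HCatUtil.deserialize(
            context.getConfiguration().get(HCatConstants.HCAT_KEY_OUTPUT_INFO));
        HCatSchema schema = jobInfo.getOutputSchema();

        // Input lines are assumed to look like "1,col1:val1,col2:val2".
        String[] vals = value.toString().split(",");
        HCatRecord record = new DefaultHCatRecord(schema.getFields().size());
        record.setInteger("key", schema, Integer.parseInt(vals[0]));
        for (int i = 1; i < vals.length; i++) {
            String[] pair = vals[i].split(":");
            record.set(pair[0], schema, pair[1]);
        }
        // HCatOutputFormat ignores the key, so null is emitted here.
        context.write(null, record);
    }
}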

Examples of org.apache.hcatalog.mapreduce.OutputJobInfo

        public static String failedKey;
        private static int count = 0;

        @Override
        public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            OutputJobInfo jobInfo = (OutputJobInfo) HCatUtil.deserialize(context.getConfiguration().get(HCatConstants.HCAT_KEY_OUTPUT_INFO));
            HCatRecord record = new DefaultHCatRecord(3);
            HCatSchema schema = jobInfo.getOutputSchema();
            String vals[] = value.toString().split(",");
            record.setInteger("key", schema, Integer.parseInt(vals[0]));
            synchronized (MapWriteAbortTransaction.class) {
                if (count == 2) {
                    failedKey = vals[0];