Package org.apache.hadoop.mapreduce

Examples of org.apache.hadoop.mapreduce.JobContext
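
JobContext gives input/output formats and tasks a view of a job's settings via getConfiguration(). A minimal sketch of the pattern every snippet below relies on, assuming the Hadoop 2.x API where Job implements JobContext (the key and value are placeholders):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;

public class JobContextExample {
  public static void main(String[] args) throws Exception {
    // Job implements JobContext; its Configuration carries every setting
    // that input/output formats later read back at split and reader time.
    Job job = Job.getInstance(new Configuration(), "jobcontext-example");
    job.getConfiguration().set("example.key", "example.value");
    System.out.println(job.getConfiguration().get("example.key"));
  }
}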


  /**
   * Check that the iterator options are getting stored in the Job conf correctly.
   */
  @SuppressWarnings("deprecation")
  @Test
  public void testSetIteratorOption() {
    JobContext job = ContextFactory.createJobContext();
    AccumuloInputFormat.setIteratorOption(job, "someIterator", "aKey", "aValue");

    Configuration conf = job.getConfiguration();
    String options = conf.get("AccumuloInputFormat.iterators.options");
    assertEquals("someIterator:aKey:aValue", options);
  }
View Full Code Here
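
As the assertion above shows, setIteratorOption flattens the option into a single colon-delimited string under the conf key AccumuloInputFormat.iterators.options. A sketch of pulling the stored value apart again (the parsing here is illustrative, not Accumulo's actual decoder):

    String options = job.getConfiguration().get("AccumuloInputFormat.iterators.options");
    String[] parts = options.split(":");  // {"someIterator", "aKey", "aValue"}
    String iteratorName = parts[0], optionKey = parts[1], optionValue = parts[2];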


  /**
   * Test getting iterator options for multiple options set.
   */
  @SuppressWarnings("deprecation")
  @Test
  public void testGetIteratorOption() {
    JobContext job = ContextFactory.createJobContext();

    AccumuloInputFormat.setIteratorOption(job, "iterator1", "key1", "value1");
    AccumuloInputFormat.setIteratorOption(job, "iterator2", "key2", "value2");
    AccumuloInputFormat.setIteratorOption(job, "iterator3", "key3", "value3");

View Full Code Here
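
The excerpt is cut off before its assertions. A hedged guess at the continuation, assuming the three options land under the same conf key as in the single-option test above (the comma delimiter and insertion order are assumptions):

    Configuration conf = job.getConfiguration();
    String options = conf.get("AccumuloInputFormat.iterators.options");
    // Assumption: multiple options are comma-separated in insertion order.
    assertEquals("iterator1:key1:value1,iterator2:key2:value2,iterator3:key3:value3", options);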

  }

  @SuppressWarnings("deprecation")
  @Test
  public void testSetRegex() {
    JobContext job = ContextFactory.createJobContext();

    String regex = ">\"*%<>\'\\";

    AccumuloInputFormat.setRegex(job, org.apache.accumulo.core.client.mapreduce.InputFormatBase.RegexType.ROW, regex);

View Full Code Here
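
The deliberately awkward regex exercises escaping: setRegex must round-trip quotes, angle brackets, and backslashes through the Configuration unchanged. A hedged continuation (the conf key name below is a hypothetical placeholder, not a documented Accumulo key):

    // String stored = job.getConfiguration().get("AccumuloInputFormat.regex.row");  // hypothetical key
    // assertEquals(regex, stored);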

      m.put(new Text(), new Text(), new Value(String.format("%09x", i).getBytes()));
      bw.addMutation(m);
    }
    bw.close();

    JobContext job = ContextFactory.createJobContext();
    AccumuloInputFormat.setInputInfo(job.getConfiguration(), "root", "".getBytes(), "testtable2", new Authorizations());
    AccumuloInputFormat.setMockInstance(job.getConfiguration(), "testmapinstance");
    AccumuloInputFormat input = new AccumuloInputFormat();
    RangeInputSplit ris = new RangeInputSplit();
    TaskAttemptContext tac = ContextFactory.createTaskAttemptContext(job);
    RecordReader<Key,Value> rr = input.createRecordReader(ris, tac);
    rr.initialize(ris, tac);
View Full Code Here
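
From this point the test would typically drain the reader. A sketch of the standard mapreduce RecordReader loop (the loop body is illustrative):

    while (rr.nextKeyValue()) {
      Key key = rr.getCurrentKey();
      Value value = rr.getCurrentValue();
      // e.g. verify the value matches the "%09x" payload written above
    }
    rr.close();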

      m.put(new Text(), new Text(), new Value(String.format("%09x", i).getBytes()));
      bw.addMutation(m);
    }
    bw.close();

    JobContext job = ContextFactory.createJobContext();
    AccumuloInputFormat.setInputInfo(job.getConfiguration(), "root", "".getBytes(), "testtable3", new Authorizations());
    AccumuloInputFormat.setMockInstance(job.getConfiguration(), "testmapinstance");
    final String regex = ".*1.*";
    AccumuloInputFormat.setRegex(job, org.apache.accumulo.core.client.mapreduce.InputFormatBase.RegexType.ROW, regex);
    AccumuloInputFormat input = new AccumuloInputFormat();
    RangeInputSplit ris = new RangeInputSplit();
    TaskAttemptContext tac = ContextFactory.createTaskAttemptContext(job);
View Full Code Here
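
A hedged continuation, assuming the ROW regex filters the scan so only matching rows are returned:

    RecordReader<Key,Value> rr = input.createRecordReader(ris, tac);
    rr.initialize(ris, tac);
    while (rr.nextKeyValue()) {
      // Every surviving entry's row should match ".*1.*"
      assertTrue(rr.getCurrentKey().getRow().toString().matches(regex));
    }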

  }

  @SuppressWarnings("deprecation")
  @Test
  public void testCorrectRangeInputSplits() throws Exception {
    JobContext job = ContextFactory.createJobContext();

    String username = "user", table = "table", rowRegex = "row.*", colfRegex = "colf.*", colqRegex = "colq.*";
    String valRegex = "val.*", instance = "instance";
    byte[] password = "password".getBytes();
    Authorizations auths = new Authorizations("foo");
View Full Code Here
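
The excerpt ends right after declaring the expected inputs. A hedged sketch of how such a test usually proceeds: push the settings into the JobContext, compute splits, and check that each split echoes them back (the per-split checks are assumptions):

    AccumuloInputFormat.setInputInfo(job.getConfiguration(), username, password, table, auths);
    AccumuloInputFormat.setMockInstance(job.getConfiguration(), instance);
    List<InputSplit> splits = new AccumuloInputFormat().getSplits(job);
    // e.g. assert each RangeInputSplit reports the configured table,
    // instance, authorizations, and regex filters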

      m.put(k.getColumnFamily(), k.getColumnQualifier(), new ColumnVisibility(k.getColumnVisibility()), k.getTimestamp(), e.getValue());
      bw.addMutation(m);
    }
    bw.close();
   
    JobContext job = ContextFactory.createJobContext();
    ChunkInputFormat.setInputInfo(job.getConfiguration(), "root", "".getBytes(), "test", new Authorizations("A", "B", "C", "D"));
    ChunkInputFormat.setMockInstance(job.getConfiguration(), "instance1");
    ChunkInputFormat cif = new ChunkInputFormat();
    RangeInputSplit ris = new RangeInputSplit();
    TaskAttemptContext tac = ContextFactory.createTaskAttemptContext(job.getConfiguration());
    RecordReader<List<Entry<Key,Value>>,InputStream> rr = cif.createRecordReader(ris, tac);
    rr.initialize(ris, tac);
   
    assertTrue(rr.nextKeyValue());
    List<Entry<Key,Value>> info = rr.getCurrentKey();
View Full Code Here
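
getCurrentKey returns the chunk's metadata entries. A brief sketch of inspecting them (the expected contents are illustrative):

    Entry<Key,Value> first = info.get(0);
    Text row = first.getKey().getRow();
    // e.g. assert the row and column family identify the expected chunk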

      m.put(k.getColumnFamily(), k.getColumnQualifier(), new ColumnVisibility(k.getColumnVisibility()), k.getTimestamp(), e.getValue());
      bw.addMutation(m);
    }
    bw.close();
   
    JobContext job = ContextFactory.createJobContext();
    ChunkInputFormat.setInputInfo(job.getConfiguration(), "root", "".getBytes(), "test", new Authorizations("A", "B", "C", "D"));
    ChunkInputFormat.setMockInstance(job.getConfiguration(), "instance2");
    ChunkInputFormat cif = new ChunkInputFormat();
    RangeInputSplit ris = new RangeInputSplit();
    TaskAttemptContext tac = ContextFactory.createTaskAttemptContext(job.getConfiguration());
    RecordReader<List<Entry<Key,Value>>,InputStream> crr = cif.createRecordReader(ris, tac);
    crr.initialize(ris, tac);
   
    assertTrue(crr.nextKeyValue());
    InputStream cis = crr.getCurrentValue();
View Full Code Here
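
getCurrentValue hands back the chunk as a stream. A sketch of reassembling it (requires java.io.ByteArrayOutputStream; the buffer size is arbitrary):

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    byte[] buf = new byte[1024];
    int n;
    while ((n = cis.read(buf)) != -1) {
      baos.write(buf, 0, n);
    }
    cis.close();
    // baos.toByteArray() now holds the reassembled chunk data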

      m.put(k.getColumnFamily(), k.getColumnQualifier(), new ColumnVisibility(k.getColumnVisibility()), k.getTimestamp(), e.getValue());
      bw.addMutation(m);
    }
    bw.close();
   
    JobContext job = ContextFactory.createJobContext();
    ChunkInputFormat.setInputInfo(job.getConfiguration(), "root", "".getBytes(), "test", new Authorizations("A", "B", "C", "D"));
    ChunkInputFormat.setMockInstance(job.getConfiguration(), "instance3");
    ChunkInputFormat cif = new ChunkInputFormat();
    RangeInputSplit ris = new RangeInputSplit();
    TaskAttemptContext tac = ContextFactory.createTaskAttemptContext(job.getConfiguration());
    RecordReader<List<Entry<Key,Value>>,InputStream> crr = cif.createRecordReader(ris, tac);
    crr.initialize(ris, tac);
   
    assertTrue(crr.nextKeyValue());
    List<Entry<Key,Value>> info = crr.getCurrentKey();
View Full Code Here

          InetAddress ip = InetAddress.getLocalHost();
          if (ip != null) {
            job.setJobSubmitHostAddress(ip.getHostAddress());
            job.setJobSubmitHostName(ip.getHostName());
          }
          JobContext context = new JobContext(jobCopy, jobId);

          jobCopy = (JobConf)context.getConfiguration();

          // Check the output specification
          if (reduces == 0 ? jobCopy.getUseNewMapper() :
            jobCopy.getUseNewReducer()) {
            org.apache.hadoop.mapreduce.OutputFormat<?,?> output =
              ReflectionUtils.newInstance(context.getOutputFormatClass(),
                  jobCopy);
            output.checkOutputSpecs(context);
          } else {
            jobCopy.getOutputFormat().checkOutputSpecs(fs, jobCopy);
          }
View Full Code Here
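
checkOutputSpecs is the fail-fast hook in this submission path: for file-based outputs it rejects the job before any task runs. A minimal sketch using the standard classes from org.apache.hadoop.mapreduce.lib.output (the path is a placeholder):

    Job job = Job.getInstance(new Configuration(), "spec-check");
    job.setOutputFormatClass(TextOutputFormat.class);
    FileOutputFormat.setOutputPath(job, new Path("/tmp/out"));
    // TextOutputFormat.checkOutputSpecs(job) throws FileAlreadyExistsException
    // if /tmp/out already exists, and InvalidJobConfException if no output
    // path had been set -- the same checks the submission code above triggers.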
