Package org.apache.hadoop.mapred

Examples of org.apache.hadoop.mapred.Counters.findCounter()
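
Before the project snippets, a minimal sketch of the two common overloads,
findCounter(Enum) and findCounter(String group, String name), on the old
mapred API. This is an illustration only: the class FindCounterExample and the
MyCounters enum are made-up names that do not appear in the snippets below,
and the sketch assumes an already-configured JobConf.

    import org.apache.hadoop.mapred.Counters;
    import org.apache.hadoop.mapred.JobClient;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.RunningJob;

    public class FindCounterExample {
      // Hypothetical user-defined counter enum; any enum constant can serve as a key.
      enum MyCounters { RECORDS_SEEN }

      public static void runAndReport(JobConf conf) throws Exception {
        RunningJob job = JobClient.runJob(conf);   // submits and waits for completion
        Counters counters = job.getCounters();

        // Enum overload: look the counter up by its enum key.
        long seen = counters.findCounter(MyCounters.RECORDS_SEEN).getValue();

        // Group/name overload: address framework counters by string, as the
        // snippets below do for "FileSystemCounters" and Task$Counter.
        Counters.Counter written =
            counters.findCounter("FileSystemCounters", "HDFS_BYTES_WRITTEN");
        long bytes = (written != null) ? written.getValue() : 0L;

        System.out.println("records seen = " + seen
            + ", HDFS bytes written = " + bytes);
      }
    }

As the examples show, some callers null-check the returned Counter before
reading it (counter names and availability vary across Hadoop versions), while
others call getValue() or the older getCounter() directly.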


            runInputRecords[i] =
              (int)counters.findCounter(REDUCE_INPUT_RECORDS).getValue();
            runOutputBytes[i] =
              counters.findCounter("FileSystemCounters",
                  "HDFS_BYTES_WRITTEN").getValue();
            runOutputRecords[i] =
              (int)counters.findCounter(REDUCE_OUTPUT_RECORDS).getValue();


            specInfo = spec.getTaskInfo(TaskType.REDUCE, i);
            // There is no reliable counter for reduce input bytes. The
            // variable-length encoding of intermediate records and other noise


            specOutputBytes[i] = specInfo.getOutputBytes();
            System.out.printf(type + " SPEC: (%9d) -> %9d :: %5d -> %5d\n",
                 specInfo.getInputBytes(), specOutputBytes[i],
                 specInputRecords[i], specOutputRecords[i]);
            System.out.printf(type + " RUN:  (%9d) -> %9d :: %5d -> %5d\n",
                 counters.findCounter(REDUCE_SHUFFLE_BYTES).getValue(),
                 runOutputBytes[i], runInputRecords[i], runOutputRecords[i]);
            break;
          default:
            specInfo = null;
            fail("Unexpected type: " + type);

          || System.currentTimeMillis() >= reportTime + maxReportInterval) {
        // Find out CPU msecs. If this number cannot be determined, skip
        // printing it.
        if (ctrs != null) {
          Counter counterCpuMsec = ctrs.findCounter("org.apache.hadoop.mapred.Task$Counter",
              "CPU_MILLISECONDS");
          if (counterCpuMsec != null) {
            long newCpuMSec = counterCpuMsec.getValue();
            if (newCpuMSec > 0) {
              cpuMsec = newCpuMSec;

        success = rj.isSuccessful();
      }
    }

    if (ctrs != null) {
      Counter counterCpuMsec = ctrs.findCounter("org.apache.hadoop.mapred.Task$Counter",
          "CPU_MILLISECONDS");
      if (counterCpuMsec != null) {
        long newCpuMSec = counterCpuMsec.getValue();
        if (newCpuMSec > cpuMsec) {
          cpuMsec = newCpuMSec;

    sLogger.info("Job Finished in " + (System.currentTimeMillis() - startTime) / 1000.0
        + " seconds");

    Counters counters = job.getCounters();

    long totalNumTerms = counters.findCounter("org.apache.hadoop.mapred.Task$Counter", 6,
        "REDUCE_INPUT_GROUPS").getCounter();

    sLogger.info("total number of terms in global dictionary = " + totalNumTerms);

    // now build the dictionary

    long startTime = System.currentTimeMillis();
    RunningJob j = JobClient.runJob(job);
    System.out.println("Job finished in "+(System.currentTimeMillis()-startTime)+" milliseconds");
    Counters counters = j.getCounters();
    long processed = (long) counters.findCounter(mapoutput.PROCESSEDPAIRS).getCounter();
    long prefixsum = (long) counters.findCounter(mapoutput.PrefixSum).getCounter();
    System.out.println("Avg prefix length = "+(prefixsum/(float)processed));
   
    return 0;
  }

    RunningJob rj = JobClient.runJob(conf);
    sLogger.info("Job Finished in " + (System.currentTimeMillis() - startTime) / 1000.0
        + " seconds");
    Counters counters = rj.getCounters();

    long numOfDocs = counters.findCounter(Docs.Total).getCounter();

    return (int) numOfDocs;
  }
}

  public void testCommandLine() throws Exception  {
    super.testCommandLine();
    // validate combiner counters
    String counterGrp = "org.apache.hadoop.mapred.Task$Counter";
    Counters counters = job.running_.getCounters();
    assertTrue(counters.findCounter(
               counterGrp, "COMBINE_INPUT_RECORDS").getValue() != 0);
    assertTrue(counters.findCounter(
               counterGrp, "COMBINE_OUTPUT_RECORDS").getValue() != 0);
  }

    // validate combiner counters
    String counterGrp = "org.apache.hadoop.mapred.Task$Counter";
    Counters counters = job.running_.getCounters();
    assertTrue(counters.findCounter(
               counterGrp, "COMBINE_INPUT_RECORDS").getValue() != 0);
    assertTrue(counters.findCounter(
               counterGrp, "COMBINE_OUTPUT_RECORDS").getValue() != 0);
  }

  public static void main(String[] args) throws Exception
  {
