Examples of ChukwaRecordKey


Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey
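
A unit test that runs TsProcessor over a sample chunk and asserts that exactly one record, filed under the expected ChukwaRecordKey, reaches the mock collector's output map.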

    // Run the processor over a sample chunk; Reporter.NULL is a no-op reporter.
    TsProcessor p = new TsProcessor();
    p.reset(chunk);
    p.process(null, chunk, output, Reporter.NULL);

    // The expected key is built the same way the processor keys its output.
    ChukwaRecordKey key = buildKey(date, DATA_SOURCE, DATA_TYPE);
    Map<ChukwaRecordKey, ChukwaRecord> outputData = output.data;

    assertNotNull("No output data found.", outputData);
    assertEquals("Output data size not correct.", 1, outputData.size());

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey
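
A helper that builds a key from a timestamp, data source, and data type. Minutes, seconds, and milliseconds are zeroed, so the key's leading component is the start of the enclosing hour and records land in hourly buckets.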

    // Zero out everything below the hour so the key's first component is the
    // start of the enclosing hour.
    calendar.setTime(date);
    calendar.set(Calendar.MINUTE, 0);
    calendar.set(Calendar.SECOND, 0);
    calendar.set(Calendar.MILLISECOND, 0);

    // Key layout: <hourBucketMillis>/<dataSource>/<eventTimeMillis>
    ChukwaRecordKey key = new ChukwaRecordKey();
    key.setKey("" + calendar.getTimeInMillis() + "/" + dataSource + "/" + date.getTime());
    key.setReduceType(dataType);

    return key;
  }
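
A minimal sketch of reading the components back out of such a key; the helper name keyTimes and the layout comment are ours, inferred from buildKey above:

    // Hypothetical inverse of buildKey: recover the timestamps from the
    // "<hourBucketMillis>/<dataSource>/<eventTimeMillis>" layout built above.
    static long[] keyTimes(ChukwaRecordKey key) {
      String[] parts = key.getKey().split("/");
      long hourBucket = Long.parseLong(parts[0]); // start of the hour, in ms
      long eventTime = Long.parseLong(parts[2]);  // original event time, in ms
      return new long[] { hourBucket, eventTime };
    }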

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey
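
A reducer that sums byte counts while re-emitting each raw record under a more specific key and the "ClientTraceDetailed" reduce type, preserving per-operation detail.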

        rec = values.next();
        bytes += Long.parseLong(rec.getValue("bytes"));

        /* output raw values to a different data type for uses which
         * require detailed per-operation data */
        ChukwaRecordKey detailed_key = new ChukwaRecordKey();
        // Rebuild the three-part key around the record's own timestamp so
        // each operation stays distinct under the detailed data type.
        String[] k = key.getKey().split("/");
        String full_timestamp = rec.getValue("actual_time");
        detailed_key.setReduceType("ClientTraceDetailed");
        detailed_key.setKey(k[0] + "/" + k[1] + "_" + k[2] + "/" + full_timestamp);
        output.collect(detailed_key, rec);
      }
      if (null == rec) {
        return;
      }

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey
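
A processor that maintains a running count and emits it as an "MSSRGraph" data point keyed by the record's own timestamp.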

          count++;
        } else {
          count--;
        }
      }
      // Emit a per-timestamp data point for the MSSRGraph series.
      ChukwaRecordKey newKey = new ChukwaRecordKey();
      newKey.setKey("" + record.getTime());
      newKey.setReduceType("MSSRGraph");
      ChukwaRecord newRecord = new ChukwaRecord();
      newRecord.add(Record.tagsField, record.getValue(Record.tagsField));
      newRecord.setTime(record.getTime());
      newRecord.add("count", "" + count);
      newRecord.add("JOBID", record.getValue("JOBID"));

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey
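
The job-history processor: each parsed event (the quoted lines in the comments are sample raw input) becomes a record keyed "JobLogHist/<phase>/<jobId>/<timestamp>", so the map, shuffle, sort, and reduce phases can be reassembled per job downstream.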

        // TASKID="tip_200804210403_0005_m_000018"
        // TASK_ATTEMPT_ID="task_200804210403_0005_m_000018_0"
        // START_TIME="1208760437531"
        // HOSTNAME="tracker_xxx.yyy.com:xxx.yyy.com/xxx.xxx.xxx.xxx:53734"

        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/Map/" + keys.get("JOBID") + "/"
            + keys.get("START_TIME"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("START_TIME")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("START_TIME", keys.get("START_TIME"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/Map/S");
        output.collect(key, record);

      } else if (keys.get("RECORD_TYPE").equalsIgnoreCase("MapAttempt")
          && keys.containsKey("FINISH_TIME")) {
        // MapAttempt TASK_TYPE="MAP"
        // TASKID="tip_200804210403_0005_m_005494"
        // TASK_ATTEMPT_ID="task_200804210403_0005_m_005494_0"
        // TASK_STATUS="SUCCESS"
        // FINISH_TIME="1208760624124"
        // HOSTNAME="tracker_xxxx.yyyy.com:xxx.yyy.com/xxx.xxx.xxx.xxx:55491"

        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/Map/" + keys.get("JOBID") + "/"
            + keys.get("FINISH_TIME"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("FINISH_TIME")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("FINISH_TIME", keys.get("FINISH_TIME"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/Map/E");
        output.collect(key, record);
      }

      else if (keys.get("RECORD_TYPE").equalsIgnoreCase("ReduceAttempt")
          && keys.containsKey("START_TIME")) {
        // ReduceAttempt TASK_TYPE="REDUCE"
        // TASKID="tip_200804210403_0005_r_000138"
        // TASK_ATTEMPT_ID="task_200804210403_0005_r_000138_0"
        // START_TIME="1208760454885"
        // HOSTNAME="tracker_xxxx.yyyy.com:xxx.yyy.com/xxx.xxx.xxx.xxx:51947"

        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/SHUFFLE/" + keys.get("JOBID") + "/"
            + keys.get("START_TIME"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("START_TIME")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("START_TIME", keys.get("START_TIME"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/SHUFFLE/S");
        output.collect(key, record);

      } else if (keys.get("RECORD_TYPE").equalsIgnoreCase("ReduceAttempt")
          && keys.containsKey("FINISH_TIME")) {
        // ReduceAttempt TASK_TYPE="REDUCE"
        // TASKID="tip_200804210403_0005_r_000138"
        // TASK_ATTEMPT_ID="task_200804210403_0005_r_000138_0"
        // TASK_STATUS="SUCCESS" SHUFFLE_FINISHED="1208760787167"
        // SORT_FINISHED="1208760787354" FINISH_TIME="1208760802395"
        // HOSTNAME="tracker__xxxx.yyyy.com:xxx.yyy.com/xxx.xxx.xxx.xxx:51947"

        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/SHUFFLE/" + keys.get("JOBID") + "/"
            + keys.get("SHUFFLE_FINISHED"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("SHUFFLE_FINISHED")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("SHUFFLE_FINISHED", keys.get("SHUFFLE_FINISHED"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/SHUFFLE/E");
        output.collect(key, record);

        // SORT
        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/SORT/" + keys.get("JOBID") + "/"
            + keys.get("SHUFFLE_FINISHED"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("SHUFFLE_FINISHED")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("START_TIME", keys.get("SHUFFLE_FINISHED"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/SORT/S");
        output.collect(key, record);

        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/SORT/" + keys.get("JOBID") + "/"
            + keys.get("SORT_FINISHED"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("SORT_FINISHED")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("SORT_FINISHED", keys.get("SORT_FINISHED"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/SORT/E");
        output.collect(key, record);

        // Reduce
        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/REDUCE/" + keys.get("JOBID") + "/"
            + keys.get("SORT_FINISHED"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("SORT_FINISHED")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("START_TIME", keys.get("SORT_FINISHED"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/REDUCE/S");
        output.collect(key, record);

        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/REDUCE/" + keys.get("JOBID") + "/"
            + keys.get("FINISH_TIME"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("FINISH_TIME")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("FINISH_TIME", keys.get("FINISH_TIME"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/REDUCE/E");
        output.collect(key, record);

      } else if (keys.get("RECORD_TYPE").equalsIgnoreCase("Job")
          && keys.containsKey("COUNTERS")) {
        // Job JOBID="job_200804210403_0005" FINISH_TIME="1208760906816"
        // JOB_STATUS="SUCCESS" FINISHED_MAPS="5912"
        // FINISHED_REDUCES="739" FAILED_MAPS="0" FAILED_REDUCES="0"
        // COUNTERS="File Systems.Local bytes read:1735053407244,File
        // Systems.Local bytes written:2610106384012,File Systems.HDFS
        // bytes read:801605644910,File Systems.HDFS bytes
        // written:44135800,
        // Job Counters .Launched map tasks:5912,Job Counters .Launched
        // reduce tasks:739,Job Counters .Data-local map tasks:5573,Job
        // Counters .Rack-local map tasks:316,Map-Reduce Framework.
        // Map input records:9410696067,Map-Reduce Framework.Map output
        // records:9410696067,Map-Reduce Framework.Map input
        // bytes:801599188816,Map-Reduce Framework.Map output
        // bytes:784427968116,
        // Map-Reduce Framework.Combine input records:0,Map-Reduce
        // Framework.Combine output records:0,Map-Reduce
        // Framework.Reduce input groups:477265,Map-Reduce
        // Framework.Reduce input records:739000,
        // Map-Reduce Framework.Reduce output records:739000"

        record = new ChukwaRecord();
        key = new ChukwaRecordKey();
        buildGenericRecord(record, null,
            Long.parseLong(keys.get("FINISH_TIME")), "MRJobCounters");
        extractCounters(record, keys.get("COUNTERS"));

        // Strip the "job_" prefix and underscores: job_200804210403_0005 -> 2008042104030005.
        String jobId = keys.get("JOBID").replace("_", "").substring(3);
        record.add("JobId", jobId);

        // FIXME validate this once HodId is available
        if (keys.containsKey("HODID")) {
          record.add("HodId", keys.get("HODID"));
        }

        // log.info("MRJobCounters +1");
        output.collect(key, record);
      }

      if (keys.containsKey("TASK_TYPE")
          && keys.containsKey("COUNTERS")
          && (keys.get("TASK_TYPE").equalsIgnoreCase("REDUCE") || keys.get(
              "TASK_TYPE").equalsIgnoreCase("MAP"))) {
        // MAP
        // Task TASKID="tip_200804210403_0005_m_000154" TASK_TYPE="MAP"
        // TASK_STATUS="SUCCESS" FINISH_TIME="1208760463883"
        // COUNTERS="File Systems.Local bytes read:159265655,File
        // Systems.Local bytes written:318531310,
        // File Systems.HDFS bytes read:145882417,Map-Reduce
        // Framework.Map input records:1706604,
        // Map-Reduce Framework.Map output records:1706604,Map-Reduce
        // Framework.Map input bytes:145882057,
        // Map-Reduce Framework.Map output bytes:142763253,Map-Reduce
        // Framework.Combine input records:0,Map-Reduce
        // Framework.Combine output records:0"

        // REDUCE
        // Task TASKID="tip_200804210403_0005_r_000524"
        // TASK_TYPE="REDUCE" TASK_STATUS="SUCCESS"
        // FINISH_TIME="1208760877072"
        // COUNTERS="File Systems.Local bytes read:1179319677,File
        // Systems.Local bytes written:1184474889,File Systems.HDFS
        // bytes written:59021,
        // Map-Reduce Framework.Reduce input groups:684,Map-Reduce
        // Framework.Reduce input records:1000,Map-Reduce
        // Framework.Reduce output records:1000"

        record = new ChukwaRecord();
        key = new ChukwaRecordKey();
        buildGenericRecord(record, null,
            Long.parseLong(keys.get("FINISH_TIME")), "SizeVsFinishTime");
        extractCounters(record, keys.get("COUNTERS"));
        record.add("JOBID", keys.get("JOBID"));
        record.add("TASKID", keys.get("TASKID"));
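
Every block above repeats one construction: a phase-scoped key of the form "JobLogHist/<phase>/<jobId>/<timestamp>" with a fixed reduce type. A hypothetical helper (the name jobLogHistKey is ours, not Chukwa's) makes the pattern explicit:

    // Hypothetical refactoring of the repeated key construction above.
    private static ChukwaRecordKey jobLogHistKey(String phase, String jobId,
        String timestamp) {
      ChukwaRecordKey key = new ChukwaRecordKey();
      key.setKey("JobLogHist/" + phase + "/" + jobId + "/" + timestamp);
      key.setReduceType("JobLogHistoryReduceProcessor");
      return key;
    }

    // e.g. key = jobLogHistKey("SHUFFLE", keys.get("JOBID"), keys.get("START_TIME"));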

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey
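
A ps-output processor: every process in the listing becomes its own ChukwaRecordKey/ChukwaRecord pair under a shared reduce type.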

  protected void parse(String recordEntry,
      OutputCollector<ChukwaRecordKey, ChukwaRecord> output, Reporter reporter)
      throws Throwable {
    LogEntry log = new LogEntry(recordEntry);
    PsOutput ps = new PsOutput(log.getBody());
    // One key/record pair per process in the ps listing.
    for (HashMap<String, String> processInfo : ps.getProcessList()) {
      key = new ChukwaRecordKey();
      ChukwaRecord record = new ChukwaRecord();
      this.buildGenericRecord(record, null, log.getDate().getTime(), reduceType);
      for (Entry<String, String> entry : processInfo.entrySet()) {
        record.add(entry.getKey(), entry.getValue());
      }

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey
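
Deriving a HOD cluster id from a job configuration path, then re-keying the record as "MRJob" on the job's init time.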

      // HodId is the path segment between "mapredsystem/" and the next dot.
      int idx = jobConf.indexOf("mapredsystem/");
      idx += "mapredsystem/".length();
      int idx2 = jobConf.indexOf(".", idx);
      data.put("HodId", jobConf.substring(idx, idx2));

      ChukwaRecordKey newKey = new ChukwaRecordKey();
      newKey.setKey("" + initTime);
      newKey.setReduceType("MRJob");

      ChukwaRecord newRecord = new ChukwaRecord();
      newRecord.add(Record.tagsField, record.getValue(Record.tagsField));
      newRecord.setTime(initTime);

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey
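
A complete minimal processor: the two-argument ChukwaRecordKey constructor sets the reduce type and the key in one step.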

public class MockMapProcessor extends AbstractProcessor {

  protected void parse(String recordEntry,
                       OutputCollector<ChukwaRecordKey, ChukwaRecord> output,
                       Reporter reporter) throws Throwable {
    ChukwaRecordKey key = new ChukwaRecordKey("someReduceType", recordEntry);
    ChukwaRecord record = new ChukwaRecord();

    output.collect(key, record);
  }
}
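
A self-contained sketch of driving that collect path outside MapReduce; the HashMap-backed OutputCollector and the class name CollectSketch are our stand-ins, not Chukwa classes:

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
    import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;
    import org.apache.hadoop.mapred.OutputCollector;

    public class CollectSketch {
      public static void main(String[] args) throws Exception {
        // In-memory stand-in for the MapReduce OutputCollector.
        final Map<ChukwaRecordKey, ChukwaRecord> sink = new HashMap<>();
        OutputCollector<ChukwaRecordKey, ChukwaRecord> output = sink::put;

        // Same two-argument construction as MockMapProcessor above.
        ChukwaRecordKey key = new ChukwaRecordKey("someReduceType", "someEntry");
        ChukwaRecord record = new ChukwaRecord();
        record.setTime(System.currentTimeMillis());

        output.collect(key, record);
        System.out.println(sink.size()); // prints 1
      }
    }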

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey
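
A test hard-coded against test/samples/JobLog.log: it matches task numbers out of attempt IDs and tracks which of the expected map and reduce tasks have been seen.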

    // Hard-coded to check the contents of test/samples/JobLog.log
    try {

      // Group 1 captures the task number from attempt IDs such as
      // attempt_200804210403_0005_m_000018_0.
      Pattern task_id_pat = Pattern.compile("attempt_[0-9]*_[0-9]*_[mr]_([0-9]*)_[0-9]*");

      ChukwaRecordKey key = new ChukwaRecordKey();
      ChukwaRecord record = new ChukwaRecord();

      // initialize data structures for checking FSM
      // should see 10 maps, 8 reduces
      boolean[] mapSeen = new boolean[10];

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey
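
Validating FSM output: glob the sink directory for .evt files and read each SequenceFile back as ChukwaRecordKey/ChukwaRecord pairs.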

      Path fsm_outputs = new Path(fsmSink + "*/*/*/*.evt");
      FileStatus[] files = fileSys.globStatus(fsm_outputs);
      int count = 0;
      int numHDFSRead = 0, numHDFSWrite = 0, numShuffles = 0;
      ChukwaRecordKey key = new ChukwaRecordKey();
      ChukwaRecord record = new ChukwaRecord();

      for (int i = 0; i < files.length; i++) {
        SequenceFile.Reader r = new SequenceFile.Reader(fileSys, files[i].getPath(), conf);
        System.out.println("Processing files " + files[i].getPath().toString());
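
The loop body is truncated above; a minimal sketch of the read-back it performs, assuming the standard SequenceFile.Reader.next(key, value) contract (the class and method names here are ours):

    import java.io.IOException;

    import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
    import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.SequenceFile;

    public class EvtKeyDump {
      // Print every key stored in one .evt sink file.
      static void dumpKeys(FileSystem fileSys, Path path, Configuration conf)
          throws IOException {
        SequenceFile.Reader r = new SequenceFile.Reader(fileSys, path, conf);
        ChukwaRecordKey key = new ChukwaRecordKey();
        ChukwaRecord record = new ChukwaRecord();
        try {
          // next() refills the reusable key/record pair until the file ends.
          while (r.next(key, record)) {
            System.out.println(key.getReduceType() + " : " + key.getKey());
          }
        } finally {
          r.close();
        }
      }
    }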