Examples of ChukwaRecord
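The snippets on this page are excerpts from the Apache Chukwa demux/extraction code and its tests. They all revolve around the same pairing: a ChukwaRecordKey (a reduce type plus a key string) and a ChukwaRecord (a timestamp plus arbitrary String name/value fields). As orientation, here is a minimal sketch using only calls that appear in the excerpts below; the reduce type, key layout and field values are illustrative, not taken from any one example.

import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;

public class ChukwaRecordBasics {
  public static void main(String[] args) {
    // Key: a reduce type plus a slash-separated key string (values here are illustrative).
    ChukwaRecordKey key = new ChukwaRecordKey();
    key.setReduceType("SystemMetrics");
    key.setKey("1241568000000/host01/1241568021982");

    // Record: a timestamp plus arbitrary String name/value fields.
    ChukwaRecord record = new ChukwaRecord();
    record.setTime(1241568021982L);
    record.add("csource", "host01");
    record.add("mem_used_pcnt", "42.0");

    // Fields can be listed and read back by name, as the dump example further down does.
    for (String field : record.getFields()) {
      System.out.println(field + " = " + record.getValue(field));
    }
    System.out.println(key.getReduceType() + " / " + key.getKey());
  }
}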


Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord

    log.info("File: [" +  file + "]" + fs.exists(new Path(file)));
    try {
      reader = new SequenceFile.Reader(fs, new Path(file), conf);

      ChukwaRecordKey key = new ChukwaRecordKey();
      ChukwaRecord record = new ChukwaRecord();

      StringBuilder sb = new StringBuilder();
      while (reader.next(key, record)) {
        // Dump the key, then every field of the record, into the buffer.
        sb.append("===== KEY   =====");

        sb.append("DataType: " + key.getReduceType());
        sb.append("Key: " + key.getKey());
        sb.append("===== Value =====");

        String[] fields = record.getFields();
        Arrays.sort(fields);
        sb.append("Timestamp : " + record.getTime());
        for (String field : fields) {
          sb.append("[" + field + "] :" + record.getValue(field));
        }
      }
     
      return sb.toString();
    } catch (Throwable e) {
      // Assumed handling for the truncated excerpt: rethrow so the caller sees the failure.
      throw new RuntimeException("Unable to read SequenceFile [" + file + "]", e);
    }
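The excerpt above walks a Hadoop SequenceFile of ChukwaRecordKey/ChukwaRecord pairs and dumps each record's fields. A matching writer might look like the sketch below; it uses the classic SequenceFile.createWriter(fs, conf, path, keyClass, valueClass) overload that pairs with the SequenceFile.Reader used above. The output path, key and field values are placeholders.

// Sketch only: writes one ChukwaRecordKey/ChukwaRecord pair into a SequenceFile
// that the dump code above could read back. The output path is a placeholder.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;

public class WriteChukwaRecords {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.getLocal(conf);
    Path out = new Path("/tmp/chukwa-demo.seq");   // hypothetical path

    SequenceFile.Writer writer = SequenceFile.createWriter(
        fs, conf, out, ChukwaRecordKey.class, ChukwaRecord.class);
    try {
      ChukwaRecordKey key = new ChukwaRecordKey();
      key.setReduceType("Demo");
      key.setKey("Demo/host01/1241568021982");

      ChukwaRecord record = new ChukwaRecord();
      record.setTime(1241568021982L);
      record.add("body", "hello chukwa");

      writer.append(key, record);
    } finally {
      writer.close();
    }
  }
}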

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord

    metricsList.fromXml(xml);
    String recType = metricsList.getRecordType();
    long timestamp = metricsList.getTimestamp();
    for (ChukwaMetrics metrics : metricsList.getMetricsList()) {
      key = new ChukwaRecordKey();
      ChukwaRecord record = new ChukwaRecord();
      this.buildGenericRecord(record, null, -1L, recType);
      record.setTime(timestamp);
      key.setKey(getKey(timestamp, metrics.getKey()));
      record.add("key", metrics.getKey());
      for (Entry<String, String> attr : metrics.getAttributes().entrySet()) {
        record.add(attr.getKey(), attr.getValue());
      }
      output.collect(key, record);
    }
  }

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord

    Map<ChukwaRecordKey, ChukwaRecord> outputData = output.data;

    assertNotNull("No output data found.", outputData);
    assertEquals("Output data size not correct.", 1, outputData.size());

    ChukwaRecord record = outputData.get(key);
    assertNotNull("Output record not found.", record);
    assertEquals("Output record time not correct.", date.getTime(), record.getTime());
    assertEquals("Output record body not correct.", recordData,
            new String(record.getMapFields().get("body").get()));
  }
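This test pulls the collected records out of output.data, a map from ChukwaRecordKey to ChukwaRecord populated by the test's output collector. Chukwa ships a test helper for this; if you need the same behaviour elsewhere, a minimal stand-in might look like the sketch below. The class name is hypothetical; only the org.apache.hadoop.mapred.OutputCollector interface it implements is real.

// Minimal capturing collector: stores every collected pair in a public map,
// mirroring how the test above reads output.data.
import java.io.IOException;
import java.util.HashMap;
import org.apache.hadoop.mapred.OutputCollector;

public class CapturingCollector<K, V> implements OutputCollector<K, V> {
  public final HashMap<K, V> data = new HashMap<K, V>();

  @Override
  public void collect(K key, V value) throws IOException {
    data.put(key, value);   // last value wins, which is enough for single-record tests
  }
}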

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord

            Iterator<ChukwaRecord> values,
            OutputCollector<ChukwaRecordKey, ChukwaRecord> output,
            Reporter reporter) {
    try {
      long bytes = 0L;
      ChukwaRecord rec = null;
      while (values.hasNext()) {
        /* aggregate bytes for current key */
        rec = values.next();
        bytes += Long.valueOf(rec.getValue("bytes"));
       
        /* output raw values to different data type for uses which
         * require detailed per-operation data */
        ChukwaRecordKey detailed_key = new ChukwaRecordKey();
        String [] k = key.getKey().split("/");
        String full_timestamp = null;
        full_timestamp = rec.getValue("actual_time");
        detailed_key.setReduceType("ClientTraceDetailed");
        detailed_key.setKey(k[0]+"/"+k[1]+"_"+k[2]+"/"+full_timestamp);
        output.collect(detailed_key, rec);
      }
      if (null == rec) {
        return;
      }
      ChukwaRecord emit = new ChukwaRecord();
      emit.add(Record.tagsField, rec.getValue(Record.tagsField));
      emit.add(Record.sourceField, "undefined"); // TODO
      emit.add(Record.applicationField, rec.getValue(Record.applicationField));

      String[] k = key.getKey().split("/");
      emit.add(k[1] + "_" + k[2], String.valueOf(bytes));
      emit.setTime(Long.valueOf(k[3]));
      output.collect(key, emit);

    } catch (IOException e) {
      log.warn("Unable to collect output in SystemMetricsReduceProcessor [" + key + "]", e);
    }

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord

      idx = recordEntry.indexOf(' ', start);
      // String className = recordEntry.substring(start, idx-1);
      String body = recordEntry.substring(idx + 1);

      Hashtable<String, String> keys = new Hashtable<String, String>();
      ChukwaRecord record = null;

      int firstSep = body.indexOf(" ");
      keys.put("RECORD_TYPE", body.substring(0, firstSep));
      // log.info("JobLogHistoryProcessor Add field: [RECORD_TYPE]["
      // + keys.get("RECORD_TYPE") + "]");

      body = body.substring(firstSep);

      internalMatcher.reset(body);

      // String fieldName = null;
      // String fieldValue = null;

      while (internalMatcher.matches()) {

        keys.put(internalMatcher.group(1).trim(), internalMatcher.group(2)
            .trim());

        // TODO Remove debug info before production
        // fieldName = internalMatcher.group(1).trim();
        // fieldValue = internalMatcher.group(2).trim();
        // log.info("JobLogHistoryProcessor Add field: [" + fieldName +
        // "][" + fieldValue +"]" );
        // log.info("EOL : [" + internalMatcher.group(3) + "]" );
        internalMatcher.reset(internalMatcher.group(3));
      }

      if (!keys.containsKey("JOBID")) {
        // Extract JobID from taskID
        // JOBID = "job_200804210403_0005"
        // TASKID = "tip_200804210403_0005_m_000018"
        String jobId = keys.get("TASKID");
        int idx1 = jobId.indexOf('_', 0);
        int idx2 = jobId.indexOf('_', idx1 + 1);
        idx2 = jobId.indexOf('_', idx2 + 1);
        keys.put("JOBID", "job" + jobId.substring(idx1, idx2));
        // log.info("JobLogHistoryProcessor Add field: [JOBID]["
        // + keys.get("JOBID") + "]");
      }

      // if (keys.get("RECORD_TYPE").equalsIgnoreCase("Job") &&
      // keys.containsKey("SUBMIT_TIME"))
      // {
      // // Job JOBID="job_200804210403_0005" JOBNAME="MY_JOB"
      // USER="userxxx"
      // // SUBMIT_TIME="1208760436751"
      // JOBCONF="/mapredsystem/xxx.yyy.com/job_200804210403_0005/job.xml"
      //         
      //         
      // }
      // else if (keys.get("RECORD_TYPE").equalsIgnoreCase("Job") &&
      // keys.containsKey("LAUNCH_TIME"))
      // {
      // // Job JOBID="job_200804210403_0005" LAUNCH_TIME="1208760437110"
      // TOTAL_MAPS="5912" TOTAL_REDUCES="739"
      //         
      // }
      // else if (keys.get("RECORD_TYPE").equalsIgnoreCase("Job") &&
      // keys.containsKey("FINISH_TIME"))
      // {
      // // Job JOBID="job_200804210403_0005" FINISH_TIME="1208760906816"
      // JOB_STATUS="SUCCESS" FINISHED_MAPS="5912" FINISHED_REDUCES="739"
      // FAILED_MAPS="0" FAILED_REDUCES="0"
      // // COUNTERS="File Systems.Local bytes read:1735053407244,File
      // Systems.Local bytes written:2610106384012,File Systems.HDFS bytes
      // read:801605644910,File Systems.HDFS bytes written:44135800,
      // // Job Counters .Launched map tasks:5912,Job Counters .Launched
      // reduce tasks:739,Job Counters .Data-local map tasks:5573,Job
      // Counters .Rack-local map tasks:316,Map-Reduce Framework.
      // // Map input records:9410696067,Map-Reduce Framework.Map output
      // records:9410696067,Map-Reduce Framework.Map input
      // bytes:801599188816,Map-Reduce Framework.Map output
      // bytes:784427968116,
      // // Map-Reduce Framework.Combine input records:0,Map-Reduce
      // Framework.Combine output records:0,Map-Reduce Framework.Reduce
      // input groups:477265,Map-Reduce Framework.Reduce input
      // records:739000,
      // // Map-Reduce Framework.Reduce output records:739000"
      //         
      // }
      // else
      if (keys.get("RECORD_TYPE").equalsIgnoreCase("MapAttempt")
          && keys.containsKey("START_TIME")) {
        // MapAttempt TASK_TYPE="MAP"
        // TASKID="tip_200804210403_0005_m_000018"
        // TASK_ATTEMPT_ID="task_200804210403_0005_m_000018_0"
        // START_TIME="1208760437531"
        // HOSTNAME="tracker_xxx.yyy.com:xxx.yyy.com/xxx.xxx.xxx.xxx:53734"

        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/Map/" + keys.get("JOBID") + "/"
            + keys.get("START_TIME"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("START_TIME")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("START_TIME", keys.get("START_TIME"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/Map/S");
        output.collect(key, record);

      } else if (keys.get("RECORD_TYPE").equalsIgnoreCase("MapAttempt")
          && keys.containsKey("FINISH_TIME")) {
        // MapAttempt TASK_TYPE="MAP"
        // TASKID="tip_200804210403_0005_m_005494"
        // TASK_ATTEMPT_ID="task_200804210403_0005_m_005494_0"
        // TASK_STATUS="SUCCESS"
        // FINISH_TIME="1208760624124"
        // HOSTNAME="tracker_xxxx.yyyy.com:xxx.yyy.com/xxx.xxx.xxx.xxx:55491"

        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/Map/" + keys.get("JOBID") + "/"
            + keys.get("FINISH_TIME"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("FINISH_TIME")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("FINISH_TIME", keys.get("FINISH_TIME"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/Map/E");
        output.collect(key, record);
      }

      else if (keys.get("RECORD_TYPE").equalsIgnoreCase("ReduceAttempt")
          && keys.containsKey("START_TIME")) {
        // ReduceAttempt TASK_TYPE="REDUCE"
        // TASKID="tip_200804210403_0005_r_000138"
        // TASK_ATTEMPT_ID="task_200804210403_0005_r_000138_0"
        // START_TIME="1208760454885"
        // HOSTNAME="tracker_xxxx.yyyy.com:xxx.yyy.com/xxx.xxx.xxx.xxx:51947"

        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/SHUFFLE/" + keys.get("JOBID") + "/"
            + keys.get("START_TIME"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("START_TIME")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("START_TIME", keys.get("START_TIME"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/SHUFFLE/S");
        output.collect(key, record);

      } else if (keys.get("RECORD_TYPE").equalsIgnoreCase("ReduceAttempt")
          && keys.containsKey("FINISH_TIME")) {
        // ReduceAttempt TASK_TYPE="REDUCE"
        // TASKID="tip_200804210403_0005_r_000138"
        // TASK_ATTEMPT_ID="task_200804210403_0005_r_000138_0"
        // TASK_STATUS="SUCCESS" SHUFFLE_FINISHED="1208760787167"
        // SORT_FINISHED="1208760787354" FINISH_TIME="1208760802395"
        // HOSTNAME="tracker__xxxx.yyyy.com:xxx.yyy.com/xxx.xxx.xxx.xxx:51947"

        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/SHUFFLE/" + keys.get("JOBID") + "/"
            + keys.get("SHUFFLE_FINISHED"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("SHUFFLE_FINISHED")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("SHUFFLE_FINISHED", keys.get("SHUFFLE_FINISHED"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/SHUFFLE/E");
        output.collect(key, record);

        // SORT
        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/SORT/" + keys.get("JOBID") + "/"
            + keys.get("SHUFFLE_FINISHED"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("SHUFFLE_FINISHED")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("START_TIME", keys.get("SHUFFLE_FINISHED"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/SORT/S");
        output.collect(key, record);

        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/SORT/" + keys.get("JOBID") + "/"
            + keys.get("SORT_FINISHED"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("SORT_FINISHED")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("SORT_FINISHED", keys.get("SORT_FINISHED"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/SORT/E");
        output.collect(key, record);

        // Reduce
        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/REDUCE/" + keys.get("JOBID") + "/"
            + keys.get("SORT_FINISHED"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("SORT_FINISHED")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("START_TIME", keys.get("SORT_FINISHED"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/REDUCE/S");
        output.collect(key, record);

        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/REDUCE/" + keys.get("JOBID") + "/"
            + keys.get("FINISH_TIME"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("SORT_FINISHED")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("FINISH_TIME", keys.get("SORT_FINISHED"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/REDUCE/E");
        output.collect(key, record);

      } else if (keys.get("RECORD_TYPE").equalsIgnoreCase("Job")
          && keys.containsKey("COUNTERS")) {
        // Job JOBID="job_200804210403_0005" FINISH_TIME="1208760906816"
        // JOB_STATUS="SUCCESS" FINISHED_MAPS="5912"
        // FINISHED_REDUCES="739" FAILED_MAPS="0" FAILED_REDUCES="0"
        // COUNTERS="File Systems.Local bytes read:1735053407244,File
        // Systems.Local bytes written:2610106384012,File Systems.HDFS
        // bytes read:801605644910,File Systems.HDFS bytes
        // written:44135800,
        // Job Counters .Launched map tasks:5912,Job Counters .Launched
        // reduce tasks:739,Job Counters .Data-local map tasks:5573,Job
        // Counters .Rack-local map tasks:316,Map-Reduce Framework.
        // Map input records:9410696067,Map-Reduce Framework.Map output
        // records:9410696067,Map-Reduce Framework.Map input
        // bytes:801599188816,Map-Reduce Framework.Map output
        // bytes:784427968116,
        // Map-Reduce Framework.Combine input records:0,Map-Reduce
        // Framework.Combine output records:0,Map-Reduce
        // Framework.Reduce input groups:477265,Map-Reduce
        // Framework.Reduce input records:739000,
        // Map-Reduce Framework.Reduce output records:739000"

        record = new ChukwaRecord();
        key = new ChukwaRecordKey();
        buildGenericRecord(record, null, Long
            .parseLong(keys.get("FINISH_TIME")), "MRJobCounters");
        extractCounters(record, keys.get("COUNTERS"));

        String jobId = keys.get("JOBID").replace("_", "").substring(3);
        record.add("JobId", "" + jobId);

        // FIXME validate this when HodId will be available
        if (keys.containsKey("HODID")) {
          record.add("HodId", keys.get("HODID"));
        }

        // log.info("MRJobCounters +1");
        output.collect(key, record);
      }

      if (keys.containsKey("TASK_TYPE")
          && keys.containsKey("COUNTERS")
          && (keys.get("TASK_TYPE").equalsIgnoreCase("REDUCE") || keys.get(
              "TASK_TYPE").equalsIgnoreCase("MAP"))) {
        // MAP
        // Task TASKID="tip_200804210403_0005_m_000154" TASK_TYPE="MAP"
        // TASK_STATUS="SUCCESS" FINISH_TIME="1208760463883"
        // COUNTERS="File Systems.Local bytes read:159265655,File
        // Systems.Local bytes written:318531310,
        // File Systems.HDFS bytes read:145882417,Map-Reduce
        // Framework.Map input records:1706604,
        // Map-Reduce Framework.Map output records:1706604,Map-Reduce
        // Framework.Map input bytes:145882057,
        // Map-Reduce Framework.Map output bytes:142763253,Map-Reduce
        // Framework.Combine input records:0,Map-Reduce
        // Framework.Combine output records:0"

        // REDUCE
        // Task TASKID="tip_200804210403_0005_r_000524"
        // TASK_TYPE="REDUCE" TASK_STATUS="SUCCESS"
        // FINISH_TIME="1208760877072"
        // COUNTERS="File Systems.Local bytes read:1179319677,File
        // Systems.Local bytes written:1184474889,File Systems.HDFS
        // bytes written:59021,
        // Map-Reduce Framework.Reduce input groups:684,Map-Reduce
        // Framework.Reduce input records:1000,Map-Reduce
        // Framework.Reduce output records:1000"

        record = new ChukwaRecord();
        key = new ChukwaRecordKey();
        buildGenericRecord(record, null, Long
            .parseLong(keys.get("FINISH_TIME")), "SizeVsFinishTime");
        extractCounters(record, keys.get("COUNTERS"));
        record.add("JOBID", keys.get("JOBID"));
        record.add("TASKID", keys.get("TASKID"));
        record.add("TASK_TYPE", keys.get("TASK_TYPE"));

        // log.info("MR_Graph +1");
        output.collect(key, record);

      }
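One easy-to-miss detail in the processor above is the JOBID fallback near the top: when a history line carries only a TASKID, the job id is rebuilt from it by slicing between underscores. A standalone version of that logic, using the same sample ids as the comments in the code:

// Standalone version of the JOBID-from-TASKID fallback used above.
public class JobIdFromTaskId {
  public static void main(String[] args) {
    // TASKID = "tip_200804210403_0005_m_000018"  ->  JOBID = "job_200804210403_0005"
    String taskId = "tip_200804210403_0005_m_000018";
    int idx1 = taskId.indexOf('_', 0);        // underscore after "tip"
    int idx2 = taskId.indexOf('_', idx1 + 1); // underscore after the timestamp part
    idx2 = taskId.indexOf('_', idx2 + 1);     // underscore after the job sequence number
    String jobId = "job" + taskId.substring(idx1, idx2);
    System.out.println(jobId);                // prints job_200804210403_0005
  }
}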

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord

      OutputCollector<ChukwaRecordKey, ChukwaRecord> output, Reporter reporter) {
    try {
      String action = key.getKey();
      int count = 0;

      ChukwaRecord record = null;
      while (values.hasNext()) {
        record = values.next();
        if (record.containsField("START_TIME")) {
          count++;
        } else {
          count--;
        }
      }
      ChukwaRecordKey newKey = new ChukwaRecordKey();
      newKey.setKey("" + record.getTime());
      newKey.setReduceType("MSSRGraph");
      ChukwaRecord newRecord = new ChukwaRecord();
      newRecord.add(Record.tagsField, record.getValue(Record.tagsField));
      newRecord.setTime(record.getTime());
      newRecord.add("count", "" + count);
      newRecord.add("JOBID", record.getValue("JOBID"));
      if (action.indexOf("JobLogHist/Map/") >= 0) {
        newRecord.add("type", "MAP");
      } else if (action.indexOf("JobLogHist/SHUFFLE/") >= 0) {
        newRecord.add("type", "SHUFFLE");
      } else if (action.indexOf("JobLogHist/SORT/") >= 0) {
        newRecord.add("type", "SORT");
      } else if (action.indexOf("JobLogHist/REDUCE/") >= 0) {
        newRecord.add("type", "REDUCE");
      }

      output.collect(newKey, newRecord);
    } catch (IOException e) {
      log.warn("Unable to collect output in JobLogHistoryReduceProcessor ["
View Full Code Here

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord

  @Override
  public void process(ChukwaRecordKey key, Iterator<ChukwaRecord> values,
      OutputCollector<ChukwaRecordKey, ChukwaRecord> output, Reporter reporter) {
    try {

      ChukwaRecord record = null;
      ChukwaRecord newRecord = new ChukwaRecord();

      while (values.hasNext()) {
        record = values.next();
        newRecord.setTime(record.getTime());

        if (record.containsField("IFACE")) {
          if (record.containsField("rxpck/s")) {
            if (record.containsField("rxbyt/s")
                && record.containsField("txbyt/s")) {
              // netSpeed is a hard-coded link capacity in bytes/s (roughly a 1 Gbit NIC).
              double netBusyPcnt = 0, netRxByts = 0, netTxByts = 0, netSpeed = 128000000.00;
              netRxByts = Double.parseDouble(record.getValue("rxbyt/s"));
              netTxByts = Double.parseDouble(record.getValue("txbyt/s"));
              netBusyPcnt = (netRxByts / netSpeed * 100)
                  + (netTxByts / netSpeed * 100);
              record.add(record.getValue("IFACE") + "_busy_pcnt", ""
                  + netBusyPcnt);
              record.add("csource", record.getValue("csource"));
            }
            record.add(record.getValue("IFACE") + ".rxbyt/s", record
                .getValue("rxbyt/s"));
            record.add(record.getValue("IFACE") + ".rxpck/s", record
                .getValue("rxpck/s"));
            record.add(record.getValue("IFACE") + ".txbyt/s", record
                .getValue("txbyt/s"));
            record.add(record.getValue("IFACE") + ".txpck/s", record
                .getValue("txpck/s"));
            record.removeValue("rxbyt/s");
            record.removeValue("rxpck/s");
            record.removeValue("txbyt/s");
            record.removeValue("txpck/s");
          }
          if (record.containsField("rxerr/s")) {
            record.add(record.getValue("IFACE") + ".rxerr/s", record
                .getValue("rxerr/s"));
            record.add(record.getValue("IFACE") + ".rxdrop/s", record
                .getValue("rxdrop/s"));
            record.add(record.getValue("IFACE") + ".txerr/s", record
                .getValue("txerr/s"));
            record.add(record.getValue("IFACE") + ".txdrop/s", record
                .getValue("txdrop/s"));
            record.removeValue("rxerr/s");
            record.removeValue("rxdrop/s");
            record.removeValue("txerr/s");
            record.removeValue("txdrop/s");
          }
          record.removeValue("IFACE");
        }

        if (record.containsField("Device:")) {
          record.add(record.getValue("Device:") + ".r/s", record
              .getValue("r/s"));
          record.add(record.getValue("Device:") + ".w/s", record
              .getValue("w/s"));
          record.add(record.getValue("Device:") + ".rkB/s", record
              .getValue("rkB/s"));
          record.add(record.getValue("Device:") + ".wkB/s", record
              .getValue("wkB/s"));
          record.add(record.getValue("Device:") + ".%util", record
              .getValue("%util"));
          record.removeValue("r/s");
          record.removeValue("w/s");
          record.removeValue("rkB/s");
          record.removeValue("wkB/s");
          record.removeValue("%util");
          record.removeValue("Device:");
        }

        if (record.containsField("swap_free")) {
          float swapUsedPcnt = 0, swapUsed = 0, swapTotal = 0;
          swapUsed = Long.parseLong(record.getValue("swap_used"));
          swapTotal = Long.parseLong(record.getValue("swap_total"));
          swapUsedPcnt = swapUsed / swapTotal * 100;
          record.add("swap_used_pcnt", "" + swapUsedPcnt);
          record.add("csource", record.getValue("csource"));
        }

        if (record.containsField("mem_used")) {
          double memUsedPcnt = 0, memTotal = 0, memUsed = 0;
          memTotal = Double.parseDouble(record.getValue("mem_total"));
          memUsed = Double.parseDouble(record.getValue("mem_used"));
          memUsedPcnt = memUsed / memTotal * 100;
          record.add("mem_used_pcnt", "" + memUsedPcnt);
          record.add("csource", record.getValue("csource"));
        }

        if (record.containsField("mem_buffers")) {
          double memBuffersPcnt = 0, memTotal = 0, memBuffers = 0;
          memTotal = Double.parseDouble(record.getValue("mem_total"));
          memBuffers = Double.parseDouble(record.getValue("mem_buffers"));
          memBuffersPcnt = memBuffers / memTotal * 100;
          record.add("mem_buffers_pcnt", "" + memBuffersPcnt);
          record.add("csource", record.getValue("csource"));
        }

        // Copy over all fields
        String[] fields = record.getFields();
        for (String f : fields) {
          newRecord.add(f, record.getValue(f));
        }
      }
      record.add("capp", "systemMetrics");
      output.collect(key, newRecord);
    } catch (IOException e) {
      // Assumed completion of the truncated excerpt, mirroring the other reduce processors on this page.
      log.warn("Unable to collect output [" + key + "]", e);
    }
  }
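The IFACE branch above folds per-interface counters into prefixed field names (for example eth0.rxbyt/s) and derives a busy percentage against a hard-coded link capacity. With made-up sample rates, the arithmetic works out like this:

// Worked example of the IFACE branch above, with made-up sample rates.
import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;

public class IfaceBusyPcntExample {
  public static void main(String[] args) {
    ChukwaRecord record = new ChukwaRecord();
    record.add("IFACE", "eth0");
    record.add("rxbyt/s", "12800000");   // 12.8 MB/s received
    record.add("txbyt/s", "6400000");    // 6.4 MB/s sent

    double netSpeed = 128000000.00;      // same hard-coded capacity as the processor above
    double busyPcnt = (Double.parseDouble(record.getValue("rxbyt/s")) / netSpeed * 100)
        + (Double.parseDouble(record.getValue("txbyt/s")) / netSpeed * 100);
    // 12800000/128000000*100 = 10.0 and 6400000/128000000*100 = 5.0, so busyPcnt = 15.0

    record.add(record.getValue("IFACE") + "_busy_pcnt", "" + busyPcnt);
    record.add(record.getValue("IFACE") + ".rxbyt/s", record.getValue("rxbyt/s"));
    record.removeValue("rxbyt/s");       // drop the raw, un-prefixed field
    System.out.println(record.getValue("eth0_busy_pcnt"));
  }
}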

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord

  };

  public void testLog4JMetricsContextChukwaRecord() throws Throwable {
    {
      Log4JMetricsContextChukwaRecord rec = new Log4JMetricsContextChukwaRecord(chukwaQueueLog[0]);
      ChukwaRecord chukwaRecord = rec.getChukwaRecord();
      assertEquals("chunkQueue", rec.getRecordType());
      assertEquals("1241568021982", chukwaRecord.getValue("chukwa_timestamp"));
      assertEquals((1241568021982L / 60000) * 60000, rec.getTimestamp()); // timestamp rounded down to the minute: 1241568000000L
      assertEquals("94", chukwaRecord.getValue("queueSize"));
    }
   
    {
      Log4JMetricsContextChukwaRecord rec = new Log4JMetricsContextChukwaRecord(chukwaAgentLog[3]);
      assertEquals("CA_chukwaAgent", rec.getRecordType());

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord

  @Override
  protected void parse(String recordEntry,
      OutputCollector<ChukwaRecordKey, ChukwaRecord> output, Reporter reporter) {
    try {
      ChukwaRecord record = new ChukwaRecord();
      this.buildGenericRecord(record, recordEntry, archiveKey
          .getTimePartition(), chunk.getDataType());
      output.collect(key, record);
    } catch (IOException e) {
      log.warn("Unable to collect output in DefaultProcessor [" + recordEntry
View Full Code Here
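DefaultProcessor is the smallest demux mapper on this page: it wraps the raw record entry in a generic ChukwaRecord and emits it under the chunk's data type. A custom processor follows the same shape; the sketch below is a hypothetical variant modeled directly on the excerpt (the class name is invented, and the AbstractProcessor base class with its key, chunk, archiveKey and log members is assumed from Chukwa's mapper package).

// Hypothetical processor modeled on DefaultProcessor above. Only the parse()
// signature and the buildGenericRecord() call are taken from the excerpt; the
// class name is made up and the AbstractProcessor base class is assumed.
import java.io.IOException;
import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;

public class UpperCaseProcessor extends AbstractProcessor {
  @Override
  protected void parse(String recordEntry,
      OutputCollector<ChukwaRecordKey, ChukwaRecord> output, Reporter reporter) {
    try {
      ChukwaRecord record = new ChukwaRecord();
      // Same generic record as DefaultProcessor, but with the body upper-cased.
      this.buildGenericRecord(record, recordEntry.toUpperCase(),
          archiveKey.getTimePartition(), chunk.getDataType());
      output.collect(key, record);
    } catch (IOException e) {
      log.warn("Unable to collect output in UpperCaseProcessor [" + recordEntry + "]", e);
    }
  }
}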

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord

      throws Throwable {
    LogEntry log = new LogEntry(recordEntry);
    PsOutput ps = new PsOutput(log.getBody());
    for (HashMap<String, String> processInfo : ps.getProcessList()) {
      key = new ChukwaRecordKey();
      ChukwaRecord record = new ChukwaRecord();
      this.buildGenericRecord(record, null, log.getDate().getTime(), reduceType);
      for (Entry<String, String> entry : processInfo.entrySet()) {
        record.add(entry.getKey(), entry.getValue());
      }
      output.collect(key, record);
    }
  }