Examples of ChukwaRecord


Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord

      else {
        dStr = recordEntry;
      }

      Date d = sdf.parse(dStr);
      ChukwaRecord record = new ChukwaRecord();
      this.buildGenericRecord(record, recordEntry, d.getTime(), chunk
          .getDataType());
      output.collect(key, record);
    } catch (ParseException e) {
      log.warn("Unable to parse the date in DefaultProcessor [" + recordEntry
View Full Code Here

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord

      convertDate.set(Calendar.YEAR, year);
      if(convertDate.getTimeInMillis() > Calendar.getInstance().getTimeInMillis()) {
        convertDate.set(Calendar.YEAR, year - 1);
      }

      ChukwaRecord record = new ChukwaRecord();
      buildGenericRecord(record, recordEntry, convertDate.getTime().getTime(),
          reduceType);
      output.collect(key, record);
    } catch (ParseException e) {
      e.printStackTrace();
View Full Code Here

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord

      String body = recordEntry.substring(idx + 1);
      body = body.replaceAll("\n", "");
      Date d = sdf.parse(dStr);
      String[] kvpairs = body.split(", ");

      ChukwaRecord record = new ChukwaRecord();
      String kvpair = null;
      String[] halves = null;
      boolean containRecord = false;
      for (int i = 0; i < kvpairs.length; ++i) {
        kvpair = kvpairs[i];
        if (kvpair.indexOf("=") >= 0) {
          halves = kvpair.split("=");
          record.add(halves[0], halves[1]);
          containRecord = true;
        }
      }
      if (record.containsField("Machine")) {
        buildGenericRecord(record, null, d.getTime(), "HodMachine");
      } else {
        buildGenericRecord(record, null, d.getTime(), "HodJob");
      }
      if (containRecord) {
View Full Code Here

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord

      OutputCollector<ChukwaRecordKey, ChukwaRecord> output, Reporter reporter) {
    try {
      reporter.incrCounter("DemuxError", "count", 1);
      reporter.incrCounter("DemuxError", chunk.getDataType() + "Count", 1);

      ChukwaRecord record = new ChukwaRecord();
      long ts = System.currentTimeMillis();
      Calendar calendar = Calendar.getInstance();
      calendar.setTimeInMillis(ts);
      calendar.set(Calendar.MINUTE, 0);
      calendar.set(Calendar.SECOND, 0);
      calendar.set(Calendar.MILLISECOND, 0);
      ChukwaRecordKey key = new ChukwaRecordKey();
      key.setKey("" + calendar.getTimeInMillis() + "/" + chunk.getDataType()
          + "/" + chunk.getSource() + "/" + ts);
      key.setReduceType(chunk.getDataType() + "InError");

      record.setTime(ts);

      record.add(Record.tagsField, chunk.getTags());
      record.add(Record.sourceField, chunk.getSource());
      record.add(Record.applicationField, chunk.getStreamName());

      DataOutputBuffer ob = new DataOutputBuffer(chunk
          .getSerializedSizeEstimate());
      chunk.write(ob);
      record.add(Record.chunkDataField, new String(ob.getData()));
      record.add(Record.chunkExceptionField, ExceptionUtil
          .getStackTrace(throwable));
      output.collect(key, record);

      return record;
    } catch (Throwable e) {
View Full Code Here

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord

      }

      String[] values = null;

      // Data
      ChukwaRecord record = null;

      for (int i = 1; i < lines.length; i++) {
        values = lines[i].split("[\\s]++");
        key = new ChukwaRecordKey();
        record = new ChukwaRecord();
        this.buildGenericRecord(record, null, d.getTime(), "Df");

        record.add(headerCols[0], values[0]);
        record.add(headerCols[1], values[1]);
        record.add(headerCols[2], values[2]);
        record.add(headerCols[3], values[3]);
        record.add(headerCols[4], values[4]
            .substring(0, values[4].length() - 1)); // Remove %
        record.add(headerCols[5], values[5]);

        output.collect(key, record);
      }

      // log.info("DFProcessor output 1 DF record");
View Full Code Here

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord

    int totalFreeNode = 0;
    int totalUsedNode = 0;
    int totalDownNode = 0;

    String body = null;
    ChukwaRecord record = null;

    try {

      String dStr = recordEntry.substring(0, 23);
      int start = 24;
      int idx = recordEntry.indexOf(' ', start);
      // String level = recordEntry.substring(start, idx);
      start = idx + 1;
      idx = recordEntry.indexOf(' ', start);
      // String className = recordEntry.substring(start, idx-1);
      body = recordEntry.substring(idx + 1);

      Date d = sdf.parse(dStr);

      String[] lines = body.split("\n");
      while (i < lines.length) {
        while ((i < lines.length) && (lines[i].trim().length() > 0)) {
          sb.append(lines[i].trim()).append("\n");
          i++;
        }

        if ((i < lines.length) && (lines[i].trim().length() > 0)) {
          throw new PbsInvalidEntry(recordEntry);
        }

        // Empty line
        i++;

        if (sb.length() > 0) {
          body = sb.toString();
          // Process all entries for a machine
          // System.out.println("=========>>> Record [" + body+ "]");

          record = new ChukwaRecord();
          key = new ChukwaRecordKey();

          buildGenericRecord(record, null, d.getTime(), machinePBSRecordType);
          parsePbsRecord(body, record);

          // Output PbsNode record for 1 machine
          output.collect(key, record);
          // log.info("PbsNodeProcessor output 1 sub-record");

          // compute Node Activity information
          nodeActivityStatus = record.getValue("state");
          if (nodeActivityStatus != null) {
            if (nodeActivityStatus.equals("free")) {
              totalFreeNode++;
              sbFreeMachines.append(record.getValue("Machine")).append(",");
            } else if (nodeActivityStatus.equals("job-exclusive")) {
              totalUsedNode++;
              sbUsedMachines.append(record.getValue("Machine")).append(",");
            } else {
              totalDownNode++;
              sbDownMachines.append(record.getValue("Machine")).append(",");
            }
          }
          sb = new StringBuilder();
        }
      }

      // End of parsing

      record = new ChukwaRecord();
      key = new ChukwaRecordKey();
      buildGenericRecord(record, null, d.getTime(), "NodeActivity");

      record.setTime(d.getTime());
      record.add("used", "" + totalUsedNode);
      record.add("free", "" + totalFreeNode);
      record.add("down", "" + totalDownNode);
      record.add("usedMachines", sbUsedMachines.toString());
      record.add("freeMachines", sbFreeMachines.toString());
      record.add("downMachines", sbDownMachines.toString());

      output.collect(key, record);
      // log.info("PbsNodeProcessor output 1 NodeActivity");
    } catch (ParseException e) {
      e.printStackTrace();
View Full Code Here

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord

  @Override
  public void parse(String line,
      OutputCollector<ChukwaRecordKey, ChukwaRecord> output, Reporter reporter) {
    log.info("record: [" + line + "] type[" + chunk.getDataType() + "]");

    ChukwaRecord record = new ChukwaRecord();
    buildGenericRecord(record, line, System.currentTimeMillis(), recordType);
    key.setKey("" + chunk.getSeqID());
    try {
      output.collect(key, record);
    } catch (IOException e) {
View Full Code Here

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord

      }
      line.setLogType("JobData");
    }
   
    key = new ChukwaRecordKey();
    ChukwaRecord record = new ChukwaRecord();
    this.buildGenericRecord(record, null, -1l, line.getLogType());
   
    for (Entry<String, String> entry : line.entrySet()) {
      record.add(entry.getKey(), entry.getValue());
    }
   
    for(Entry<String, Long> entry : line.getCounterHash().flat().entrySet()) {
      record.add(entry.getKey(), entry.getValue().toString());
    }
   
    long timestamp = line.getTimestamp();
    record.setTime(timestamp);
    key.setKey(getKey(timestamp, line.getJobId()));
    output.collect(key, record);
  }
View Full Code Here

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord

      Date d = sdf.parse(dStr);

      start = body.indexOf('{');
      JSONObject json = (JSONObject) JSONValue.parse(body.substring(start));

      ChukwaRecord record = new ChukwaRecord();
      StringBuilder datasource = new StringBuilder();
      String contextName = null;
      String recordName = null;

      Iterator<String> ki = json.keySet().iterator();
      while (ki.hasNext()) {
        String keyName = ki.next();
        if (chukwaTimestampField.intern() == keyName.intern()) {
          d = new Date((Long) json.get(keyName));
          Calendar cal = Calendar.getInstance();
          cal.setTimeInMillis(d.getTime());
          cal.set(Calendar.SECOND, 0);
          cal.set(Calendar.MILLISECOND, 0);
          d.setTime(cal.getTimeInMillis());
        } else if (contextNameField.intern() == keyName.intern()) {
          contextName = (String) json.get(keyName);
        } else if (recordNameField.intern() == keyName.intern()) {
          recordName = (String) json.get(keyName);
          record.add(keyName, json.get(keyName).toString());
        } else {
          if(json.get(keyName)!=null) {
            record.add(keyName, json.get(keyName).toString());
          }
        }
      }
      if(contextName!=null) {
        datasource.append(contextName);
        datasource.append("_");
      }
      datasource.append(recordName);
      record.add("cluster", chunk.getTag("cluster"));
      if(contextName!=null && contextName.equals("jvm")) {
        buildJVMRecord(record, d.getTime(), datasource.toString());       
      } else {
        buildGenericRecord(record, null, d.getTime(), datasource.toString());
      }
View Full Code Here

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord

    log.info("File: [" +  file + "]" + fs.exists(new Path(file)));
    try {
      reader = new SequenceFile.Reader(fs, new Path(file), conf);

      ChukwaRecordKey key = new ChukwaRecordKey();
      ChukwaRecord record = new ChukwaRecord();

      StringBuilder sb = new StringBuilder();
      while (reader.next(key, record)) {
      
        sb.append("===== KEY   =====");

        sb.append("DataType: " + key.getReduceType());
        sb.append("Key: " + key.getKey());
        sb.append("===== Value =====");

        String[] fields = record.getFields();
        Arrays.sort(fields );
        sb.append("Timestamp : " + record.getTime());
        for (String field : fields) {
          sb.append("[" + field + "] :" + record.getValue(field));
        }
      }
     
      return sb.toString();
    } catch (Throwable e) {
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.