Package org.apache.hadoop.chukwa.util

Examples of org.apache.hadoop.chukwa.util.DatabaseWriter.query()
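DatabaseWriter.query(String) executes a SQL string against the cluster's metrics database and hands back a java.sql.ResultSet for the caller to iterate. A minimal sketch of the call pattern, assuming the one-argument DatabaseWriter(cluster) constructor seen in the excerpts below and a close() method for releasing the connection (both are assumptions here, not verified against every Chukwa release; java.sql imports are omitted as elsewhere on this page):

           // Minimal sketch: open a writer for a cluster, run one query, read the rows.
           String cluster = "demo";                           // hypothetical cluster name
           DatabaseWriter db = new DatabaseWriter(cluster);
           try {
               ResultSet rs = db.query("select count(*) from system_metrics;");
               while (rs.next()) {
                   long count = rs.getLong(1);                // columns are read by index
               }
           } finally {
               db.close();                                    // assumed cleanup method
           }

The first excerpt below is a cluster health check that uses query() to flag stale or missing monitoring data: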


           // Health check: compare the newest system_metrics row against the wall clock.
           // dbc (DatabaseConfig) maps a logical table plus time range to its partition tables.
           DatabaseWriter db = new DatabaseWriter(cluster);
           Calendar c = Calendar.getInstance();
           long now = c.getTimeInMillis();
           String[] tableName = dbc.findTableName("system_metrics", now, now);
           String query = "select unix_timestamp(now()) - unix_timestamp(max(timestamp)) as delay from " + tableName[0] + ";";
           ResultSet rs = db.query(query);
           while (rs.next()) {
               long delay = rs.getLong(1);
               if (delay > 600) { // delay is in seconds; 600 s = 10 minutes
                   log.error("Chukwa: " + cluster + ": No new data for the past 10 minutes for system metrics");
                   error = true;
               }
           }
           // Count system metrics rows written during the last hour.
           query = "select count(*) as UpdatesPerHr from " + tableName[0] + " where Timestamp > date_sub(now(), interval 60 minute);";
           rs = db.query(query);
           while (rs.next()) {
               updates = rs.getLong(1);
               if (updates == 0) {
                   log.error("Chukwa: " + cluster + ": No system metrics data received for the past 60 minutes");
                   error = true;
               }
           }
           // Same check for HOD job records.
           String[] hodTableNames = dbc.findTableName("HodJob", now, now);
           query = "select count(*) as UpdatesPerHr from " + hodTableNames[0] + " where StartTime > date_sub(now(), interval 60 minute);";
           rs = db.query(query);
           while (rs.next()) {
               long updatesHod = rs.getLong(1);
               if (updatesHod == 0) {
                   log.error("Chukwa: " + cluster + ": No hod job data received for the past 60 minutes");
               }
           }
           // MapReduce jobs are checked over a full day (1440 minutes). The partition
           // table must be indexed (mrTableNames[0]); concatenating the array itself
           // would splice its toString() value into the SQL.
           String[] mrTableNames = dbc.findTableName("mr_job", now, now);
           query = "select count(*) as UpdatesPerHr from " + mrTableNames[0] + " where FINISH_TIME > date_sub(now(), interval 1440 minute);";
           rs = db.query(query);
           while (rs.next()) {
               long updatesMR = rs.getLong(1);
               if (updatesMR == 0) {
                   log.error("MDL: no map reduce job data received for the past day.");
                   error = true;
               }
           }
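
The checks above splice table names and interval literals into the SQL string, which is workable for trusted, internally generated values. Where a value originates from a request (a host name, a user-supplied timestamp), a JDBC PreparedStatement is the safer pattern; DatabaseWriter.query() takes a finished SQL string, so this sketch uses a plain java.sql.Connection, named conn here purely for illustration:

           // Hypothetical sketch: bind the interval as a parameter instead of
           // concatenating it. Table names cannot be bound, so the partition
           // name is still spliced in from findTableName().
           String sql = "select count(*) as UpdatesPerHr from " + tableName[0]
                      + " where Timestamp > date_sub(now(), interval ? minute)";
           PreparedStatement ps = conn.prepareStatement(sql);
           ps.setInt(1, 60);                  // last 60 minutes
           ResultSet prs = ps.executeQuery();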

  if (timestamp != null) {
      // Look up a single HadoopRpc row keyed by timestamp.
      try {
          String query = getSingleQuery(HadoopRpcHome.table, "timestamp", timestamp);
          ResultSet rs = dbw.query(query);
          if (rs.next()) {
              HadoopRpc obj = createHadoopRpc(rs);
              return obj;
          }
      } catch (Exception e) {
          // error handling elided in this excerpt
      }
  }

    // Look up a single HadoopRpc row by timestamp and host.
    Map<String, String> criteriaMap = new HashMap<String, String>();
    criteriaMap.put("timestamp", convertLongToDateString(Long.parseLong(timestamp)));
    criteriaMap.put("host", host);

    try {
        String query = getCriteriaQuery(HadoopRpcHome.table, criteriaMap);
        ResultSet rs = dbw.query(query);
        if (rs.next()) {
            HadoopRpc obj = createHadoopRpc(rs);
            return obj;
        }
    } catch (Exception e) {
        // error handling elided in this excerpt
    }

  // Collect every HadoopRpc row whose timestamp falls between starttime and endtime.
  Collection<HadoopRpc> collection = new Vector<HadoopRpc>();

  try {
      String query = getTimeBetweenQuery(HadoopRpcHome.table, starttime, endtime);
      ResultSet rs = dbw.query(query);
      while (rs.next()) {
          HadoopRpc obj = createHadoopRpc(rs);
          collection.add(obj);
      }
  } catch (Exception e) {
      // error handling elided in this excerpt
  }
  return collection;
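
The getSingleQuery, getCriteriaQuery, and getTimeBetweenQuery helpers are not shown on this page. Purely to illustrate the shape of the pattern, a hypothetical criteria builder might look like the sketch below; the name matches the call sites above, but the quoting and escaping policy are assumptions, not the Chukwa implementation:

      // Hypothetical sketch of a criteria builder: produces
      // "select * from <table> where k1='v1' and k2='v2'".
      // Real code must escape or parameterize the values.
      private String getCriteriaQuery(String table, Map<String, String> criteria) {
          StringBuilder sql = new StringBuilder("select * from " + table + " where ");
          String sep = "";
          for (Map.Entry<String, String> entry : criteria.entrySet()) {
              sql.append(sep).append(entry.getKey())
                 .append("='").append(entry.getValue()).append("'");
              sep = " and ";
          }
          return sql.toString();
      }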

  if (timestamp != null) {
      // Look up a single DfsThroughput row keyed by timestamp.
      try {
          String query = getSingleQuery(DfsThroughputHome.table, "timestamp", timestamp);
          ResultSet rs = dbw.query(query);
          if (rs.next()) {
              DfsThroughput obj = createDfsThroughput(rs);
              return obj;
          }
      } catch (Exception e) {
          // error handling elided in this excerpt
      }
  }

    // Look up a single DfsThroughput row by timestamp and host.
    Map<String, String> criteriaMap = new HashMap<String, String>();
    criteriaMap.put("timestamp", convertLongToDateString(Long.parseLong(timestamp)));
    criteriaMap.put("host", host);

    try {
        String query = getCriteriaQuery(DfsThroughputHome.table, criteriaMap);
        ResultSet rs = dbw.query(query);
        if (rs.next()) {
            DfsThroughput obj = createDfsThroughput(rs);
            return obj;
        }
    } catch (Exception e) {
        // error handling elided in this excerpt
    }

  // Collect every DfsThroughput row between starttime and endtime.
  Collection<DfsThroughput> collection = new Vector<DfsThroughput>();

  try {
      String query = getTimeBetweenQuery(DfsThroughputHome.table, starttime, endtime);
      ResultSet rs = dbw.query(query);
      while (rs.next()) {
          DfsThroughput obj = createDfsThroughput(rs);
          collection.add(obj);
      }
  } catch (Exception e) {
      // error handling elided in this excerpt
  }
  return collection;
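
The DfsThroughput excerpts follow the same three-query pattern as HadoopRpc, differing only in the table and the row-mapping method. For completeness, a hypothetical time-range builder in the same illustrative spirit (the column name and literal formatting are assumptions):

      // Hypothetical sketch of a time-range builder over an assumed
      // "timestamp" column; starttime/endtime are preformatted date strings.
      private String getTimeBetweenQuery(String table, String starttime, String endtime) {
          return "select * from " + table
               + " where timestamp between '" + starttime + "' and '" + endtime + "'";
      }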
