Package org.apache.hadoop.mapred

Examples of org.apache.hadoop.mapred.JobConf.addResource()
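
The excerpts below come from several open-source projects. As a quick orientation, here is a minimal, self-contained sketch of how addResource() behaves with the old org.apache.hadoop.mapred API; the resource names and the file path are placeholders, not taken from any of the excerpts:

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobConf;

public class AddResourceSketch {
    public static void main(String[] args) {
        // Start from an empty configuration so that only the resources listed
        // below are consulted.
        JobConf conf = new JobConf(false);

        // A String resource is looked up on the classpath; resources added
        // later override earlier ones for keys they both define.
        conf.addResource("core-default.xml");
        conf.addResource("core-site.xml");

        // A Path resource is read from the local filesystem instead
        // (placeholder path).
        conf.addResource(new Path("/etc/hadoop/conf/mapred-site.xml"));

        // Resources are parsed lazily, when a property is first read.
        System.out.println("fs.default.name = " + conf.get("fs.default.name"));
    }
}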


    public static JobConf config() { // remote configuration for the Hadoop cluster
        JobConf conf = new JobConf(PageRankJob.class);
        conf.setJobName("PageRank");
        conf.addResource("classpath:/hadoop/core-site.xml");
        conf.addResource("classpath:/hadoop/hdfs-site.xml");
        conf.addResource("classpath:/hadoop/mapred-site.xml");
        return conf;
    }

    public static String scaleFloat(float f) { // keep six decimal places
        DecimalFormat df = new DecimalFormat("##0.000000");
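
A hypothetical driver for the config() method above; the input/output paths and the JobClient.runJob call are illustrative and not part of the original project:

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;

public class PageRankDriver {
    public static void main(String[] args) throws Exception {
        JobConf conf = PageRankJob.config();
        // Placeholder paths; a real job would take these from its arguments.
        FileInputFormat.setInputPaths(conf, new Path("hdfs:///pagerank/in"));
        FileOutputFormat.setOutputPath(conf, new Path("hdfs:///pagerank/out"));
        // Submits the job and blocks until it completes.
        JobClient.runJob(conf);
    }
}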

        // 2. hadoop-site.xml: Site-specific configuration for a given hadoop installation.
        // Now add the settings from "properties" object to override any existing properties
        // All of the above is accomplished in the method call below
          
        JobConf jobConf = new JobConf();
        jobConf.addResource("pig-cluster-hadoop-site.xml");
           
        //the method below alters the properties object by overriding the
        //hadoop properties with the values from properties and recomputing
        //the properties
        recomputeProperties(jobConf, properties);
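
A minimal sketch of the override step described in the comments above. This is not Pig's actual recomputeProperties implementation, only the basic idea: every entry in the Properties object replaces whatever was loaded from the resource files.

import java.util.Properties;
import org.apache.hadoop.mapred.JobConf;

class PropertyOverrideSketch {
    // Copy each user-supplied property into the JobConf, overriding the value
    // that came from pig-cluster-hadoop-site.xml or the Hadoop defaults.
    static void overrideWith(JobConf jobConf, Properties properties) {
        for (String key : properties.stringPropertyNames()) {
            jobConf.set(key, properties.getProperty(key));
        }
    }
}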

                  "If you plan to use local mode, please put -x local option in command line",
                  4010);
            }

            jc = new JobConf();
            jc.addResource("pig-cluster-hadoop-site.xml");
           
            // Trick to invoke static initializer of DistributedFileSystem to add hdfs-default.xml
            // into configuration
            new DistributedFileSystem();
           
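
The "trick" above relies on the static initializer of DistributedFileSystem registering hdfs-default.xml as a default resource. Assuming the Hadoop version in use exposes Configuration.addDefaultResource, the same effect can be spelled out directly:

// Registers hdfs-default.xml as a default resource for Configuration instances,
// which is what the DistributedFileSystem static initializer does as a side effect.
org.apache.hadoop.conf.Configuration.addDefaultResource("hdfs-default.xml");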

            //the properties
            recomputeProperties(jc, properties);
        } else {
            // If we are running in local mode we don't read the hadoop conf file
            jc = new JobConf(false);
            jc.addResource("core-default.xml");
            jc.addResource("mapred-default.xml");
            recomputeProperties(jc, properties);
           
            properties.setProperty(JOB_TRACKER_LOCATION, LOCAL );
            properties.setProperty(FILE_SYSTEM_LOCATION, "file:///");
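
For reference, new JobConf(false) skips the default resource list entirely, so the local-mode branch loads only the two files it names. A sketch with the underlying Hadoop keys spelled out (assumption: Pig's JOB_TRACKER_LOCATION corresponds to "mapred.job.tracker" and FILE_SYSTEM_LOCATION to "fs.default.name"):

import org.apache.hadoop.mapred.JobConf;

class LocalModeConfSketch {
    static JobConf localConf() {
        JobConf jc = new JobConf(false);      // do not auto-load core-site.xml etc.
        jc.addResource("core-default.xml");   // defaults shipped inside the Hadoop jars
        jc.addResource("mapred-default.xml");
        jc.set("mapred.job.tracker", "local");
        jc.set("fs.default.name", "file:///");
        return jc;
    }
}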


          "test.build.data", "/tmp"));
   
      String[] sortArgs = { DEMUX_INPUT_PATH.toString(), DEMUX_OUTPUT_PATH.toString() };
      //      JobConf job = mr.createJobConf();
      JobConf job = new JobConf(new ChukwaConfiguration(), Demux.class);
      job.addResource(System.getenv("CHUKWA_CONF_DIR")+File.separator+"chukwa-demux-conf.xml");
      job.setJobName("Chukwa-Demux_" + day.format(new Date()));
      job.setInputFormat(SequenceFileInputFormat.class);
      job.setMapperClass(Demux.MapClass.class);
      job.setPartitionerClass(ChukwaRecordPartitioner.class);
      job.setReducerClass(Demux.ReduceClass.class);
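
Note that addResource(String) resolves the given name against the classpath. A hedged variant that reads the same file from the local filesystem uses the Path overload instead; the environment variable and file name are taken from the excerpt above:

import java.io.File;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobConf;

class DemuxConfSketch {
    static JobConf demuxConf() {
        JobConf job = new JobConf();
        String confDir = System.getenv("CHUKWA_CONF_DIR");
        if (confDir != null) {
            // The Path overload reads the file from the local filesystem rather
            // than looking the name up on the classpath.
            job.addResource(new Path(confDir + File.separator + "chukwa-demux-conf.xml"));
        }
        return job;
    }
}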

    log.info("Testing FSMBuilder (Job History only)");
    System.out.println("In JobHistory020");
    // Run FSMBuilder on Demux output
    try {
      JobConf job = new JobConf(new ChukwaConfiguration(), FSMBuilder.class);
      job.addResource(System.getenv("CHUKWA_CONF_DIR")+File.separator+"chukwa-demux-conf.xml");
      job.setJobName("Chukwa-FSMBuilder_" + day.format(new Date()));
      job.setMapperClass(JobHistoryTaskDataMapper.class);
      job.setPartitionerClass(FSMIntermedEntryPartitioner.class);
      job.setReducerClass(FSMBuilder.FSMReducer.class);
      job.setMapOutputValueClass(FSMIntermedEntry.class);

    System.out.println("In ClientTrace020");
    // Run FSMBuilder on Demux output
    try {
      // Process TaskTracker shuffle clienttrace entries first
      JobConf job = new JobConf(new ChukwaConfiguration(), FSMBuilder.class);
      job.addResource(System.getenv("CHUKWA_CONF_DIR")+File.separator+"chukwa-demux-conf.xml");
      job.setJobName("Chukwa-FSMBuilder_" + day.format(new Date()));
      job.setMapperClass(TaskTrackerClientTraceMapper.class);
      job.setPartitionerClass(FSMIntermedEntryPartitioner.class);
      job.setReducerClass(FSMBuilder.FSMReducer.class);
      job.setMapOutputValueClass(FSMIntermedEntry.class);
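
The test excerpts stop before the jobs are submitted. A hypothetical continuation with the old mapred API (FileInputFormat, FileOutputFormat, and JobClient from org.apache.hadoop.mapred) would set the job's input and output and run it synchronously; the paths below are placeholders:

      // Continuation sketch for the JobConf named "job" above (placeholder paths).
      FileInputFormat.setInputPaths(job, new Path("/chukwa/demux-out"));
      FileOutputFormat.setOutputPath(job, new Path("/chukwa/fsm-out"));
      JobClient.runJob(job);   // submit and wait for completion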
