Examples of run()


Examples of org.apache.hadoop.mrunit.mapreduce.ReduceDriver.run()

      list.add(createWritable(value, reducer.getValueInType()));
    }
    ReduceDriver reduceDriver = new ReduceDriver<Text, LongWritable, Text, LongWritable>();
    reduceDriver.withInput(createWritable(key, reducer.getKeyInType()), list);
    reduceDriver.setReducer(reducer);
    List results = reduceDriver.run();
    Collections.sort(results, PairComparer.INSTANCE);
    String header = String.format("(%s, %s)\r\n\r\n -> reduces via %s to -> \r\n", key, list, reducer.getClass()
        .getSimpleName());
    Approvals.verifyAll(header, results, Echo.INSTANCE);
  }
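For comparison, a minimal self-contained sketch of the same ReduceDriver.run() flow, assuming MRUnit 1.x and Hadoop's stock LongSumReducer are on the classpath; run() hands back the emitted pairs instead of asserting on them:

import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.lib.reduce.LongSumReducer;
import org.apache.hadoop.mrunit.mapreduce.ReduceDriver;
import org.apache.hadoop.mrunit.types.Pair;

public class ReduceDriverSketch {
  public static void main(String[] args) throws Exception {
    ReduceDriver<Text, LongWritable, Text, LongWritable> driver =
        ReduceDriver.newReduceDriver(new LongSumReducer<Text>());
    // One key with three values; run() executes the reducer and returns
    // the (key, value) pairs it emitted.
    driver.withInput(new Text("word"),
        Arrays.asList(new LongWritable(1), new LongWritable(2), new LongWritable(3)));
    List<Pair<Text, LongWritable>> results = driver.run();
    System.out.println(results);  // expected: a single pair (word, 6)
  }
}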

Examples of org.apache.hadoop.oncrpc.RegistrationClient.run()

  }

  static void testRequest(XDR request) {
    RegistrationClient registrationClient = new RegistrationClient("localhost",
        Nfs3Constant.SUN_RPCBIND, request);
    registrationClient.run();
  }

  static class WriteHandler extends SimpleTcpClientHandler {

    public WriteHandler(XDR request) {
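A self-contained restatement of the call above, assuming the hadoop-nfs module is on the classpath; building the XDR payload is outside this fragment, so the request is taken as given:

import org.apache.hadoop.nfs.nfs3.Nfs3Constant;
import org.apache.hadoop.oncrpc.RegistrationClient;
import org.apache.hadoop.oncrpc.XDR;

public class RegistrationSketch {
  // Sends a pre-built portmap/rpcbind request to the local registry.
  static void register(XDR request) {
    RegistrationClient client =
        new RegistrationClient("localhost", Nfs3Constant.SUN_RPCBIND, request);
    client.run();  // connect, send the request, wait for the reply
  }
}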

Examples of org.apache.hadoop.oncrpc.SimpleTcpServer.run()

  private void startTCPServer() {
    SimpleTcpServer tcpServer = new SimpleTcpServer(rpcProgram.getPort(),
        rpcProgram, 0);
    rpcProgram.startDaemons();
    tcpServer.run();
    nfsBoundPort = tcpServer.getBoundPort();
  }

  /**
   * Priority of the nfsd shutdown hook.
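The same startup step in isolation, as a sketch that assumes an RpcProgram instance is already available (constructing one is outside this fragment); getBoundPort() is what reports the real port when an ephemeral port is requested:

import org.apache.hadoop.oncrpc.RpcProgram;
import org.apache.hadoop.oncrpc.SimpleTcpServer;

public class TcpStartSketch {
  // Bind the TCP transport for an ONC RPC program and return the bound port.
  static int startTcp(RpcProgram rpcProgram, int workerCount) {
    rpcProgram.startDaemons();  // as in the snippet: daemons first, then the transport
    SimpleTcpServer tcpServer =
        new SimpleTcpServer(rpcProgram.getPort(), rpcProgram, workerCount);
    tcpServer.run();            // binds and begins serving
    return tcpServer.getBoundPort();
  }
}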

Examples of org.apache.hadoop.oncrpc.SimpleUdpServer.run()

  /* Start UDP server */
  private void startUDPServer() {
    SimpleUdpServer udpServer = new SimpleUdpServer(rpcProgram.getPort(),
        rpcProgram, 1);
    rpcProgram.startDaemons();
    udpServer.run();
    udpBoundPort = udpServer.getBoundPort();
  }

  /* Start TCP server */
  private void startTCPServer() {
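The UDP side follows the same shape; a sketch with the RpcProgram again taken as given:

import org.apache.hadoop.oncrpc.RpcProgram;
import org.apache.hadoop.oncrpc.SimpleUdpServer;

public class UdpStartSketch {
  // Bind the UDP transport for an ONC RPC program and return the bound port.
  static int startUdp(RpcProgram rpcProgram) {
    rpcProgram.startDaemons();
    SimpleUdpServer udpServer =
        new SimpleUdpServer(rpcProgram.getPort(), rpcProgram, 1);  // one worker, as above
    udpServer.run();
    return udpServer.getBoundPort();
  }
}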

Examples of org.apache.hadoop.raid.tools.RSBenchmark.run()

            encodeMethod.toString(),
            dpos,
            dlen,
            elen,
            useNative);
    rsBen.run();
  }

  /**
   * Apply operation specified by 'cmd' on all parameters
   * starting from argv[startindex].
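The constructor arguments are cut off above, so only the call pattern is restated here, with the configured benchmark instance assumed to be handed in:

import org.apache.hadoop.raid.tools.RSBenchmark;

public class RsBenchmarkSketch {
  // Run an already-configured Reed-Solomon benchmark; the encode method,
  // data position/length, erasure length and native-codec flag are set by
  // whoever constructed it.
  static void runBenchmark(RSBenchmark rsBen) {
    rsBen.run();
  }
}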

Examples of org.apache.hadoop.streaming.DumpTypedBytes.run()

        writer.close();
      }

      String[] args = new String[1];
      args[0] = "/typedbytestest";
      int ret = dumptb.run(args);
      assertEquals("Return value != 0.", 0, ret);

      ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
      TypedBytesInput tbinput = new TypedBytesInput(new DataInputStream(in));
      int counter = 0;
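A minimal sketch of the call under test, assuming hadoop-streaming is on the classpath and the DFS path exists; run() dumps the files' contents to standard output as typed bytes and returns 0 on success:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.streaming.DumpTypedBytes;

public class DumpTypedBytesSketch {
  public static void main(String[] args) throws Exception {
    DumpTypedBytes dumptb = new DumpTypedBytes(new Configuration());
    // The path is only an illustration; the test above uses /typedbytestest.
    int ret = dumptb.run(new String[] { "/typedbytestest" });
    System.exit(ret);  // non-zero indicates failure
  }
}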

Examples of org.apache.hadoop.tools.DistCp.run()

    exec("offline " + table, true);
    String export = folder.newFolder().toString();
    exec("exporttable -t " + table + " " + export, true);
    DistCp cp = newDistCp();
    String import_ = folder.newFolder().toString();
    cp.run(new String[] {"-f", export + "/distcp.txt", import_});
    exec("importtable " + table2 + " " + import_, true);
    exec("config -t " + table2 + " -np", true, "345M", true);
    exec("getsplits -t " + table2, true, "row5", true);
    exec("constraint --list -t " + table2, true, "VisibilityConstraint=1", true);
    exec("onlinetable " + table, true);
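A sketch of just the Tool-style invocation used above. DistCp's constructors differ between Hadoop releases, so the instance is assumed to come from a helper such as the test's newDistCp(); the -f flag tells DistCp to read its source paths from the listed file:

import org.apache.hadoop.tools.DistCp;

public class DistCpSketch {
  // Equivalent command line: hadoop distcp -f <fileList> <target>
  static int copyFromFileList(DistCp cp, String fileList, String target) throws Exception {
    return cp.run(new String[] { "-f", fileList, target });
  }
}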

Examples of org.apache.hadoop.util.ProgramDriver.run()

          "Identify all occurences of lines in file1 which also occur in file2");
      pgd.addClass("joindatagen", JoinDataGen.class,
          "Generate data to run the joinexample");
      pgd.addClass("joinvalidate", JoinValidate.class,
          "Validate data generated by joinexample and joindatagen");
      exitCode = pgd.run(argv);
    } catch(Throwable e){
      e.printStackTrace();
    }

    System.exit(exitCode);
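The same dispatch pattern as a self-contained sketch; WordCount is only a stand-in for any class with a static main(String[]) and assumes the hadoop-mapreduce-examples jar is available:

import org.apache.hadoop.examples.WordCount;
import org.apache.hadoop.util.ProgramDriver;

public class DriverSketch {
  public static void main(String[] argv) {
    int exitCode = -1;
    ProgramDriver pgd = new ProgramDriver();
    try {
      pgd.addClass("wordcount", WordCount.class,
          "A map/reduce program that counts the words in the input files");
      // The first element of argv selects the registered program to run.
      exitCode = pgd.run(argv);
    } catch (Throwable e) {
      e.printStackTrace();
    }
    System.exit(exitCode);
  }
}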

Examples of org.apache.hadoop.util.Tool.run()

    public static void main(String... args) throws Exception {
        LOG.info("[OT-CLEAN-I00000] Start Hadoop FS cleaning tool");
        long start = System.currentTimeMillis();
        Tool tool = new Clean();
        tool.setConf(new Configuration());
        int exit = tool.run(args); // no generic options
        long end = System.currentTimeMillis();
        LOG.info(MessageFormat.format(
                "[OT-CLEAN-I00999] Finish Hadoop FS cleaning tool (exit-code={0}, elapsed={1}ms)",
                exit,
                end - start));
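A minimal sketch of the alternative the "no generic options" comment alludes to: going through ToolRunner instead of calling run() directly, so generic options (-D, -conf, -fs, ...) are parsed before the tool sees the remaining arguments. MyTool stands in for any Tool implementation such as the Clean class above:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class ToolRunnerSketch {
  static class MyTool extends Configured implements Tool {
    @Override
    public int run(String[] args) {
      // real work goes here; args no longer contain the generic options
      return 0;
    }
  }

  public static void main(String[] args) throws Exception {
    int exit = ToolRunner.run(new Configuration(), new MyTool(), args);
    System.exit(exit);
  }
}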

Examples of org.apache.hadoop.vaidya.DiagnosticTest.run()

        Element cn = (Element)cNodeList.item(0);
        String className = cn.getFirstChild().getNodeValue().trim();
        Class rc = Class.forName(className);
        DiagnosticTest test = (DiagnosticTest)rc.newInstance();
        test.initGlobals(pa.getJobExecutionStatistics(), (Element)list.item(i));
        test.run();
        NodeList nodelist = pa.getReport().getElementsByTagName("PostExPerformanceDiagnosticReport");
        Element root = (Element)nodelist.item(0);
        //root.appendChild(rule.getReportElement(pa.getReport(), root));
        Element re = test.getReportElement(pa.getReport(), root);
        //XMLUtils.printDOM(re);
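The reflective step in isolation, as a sketch: getDeclaredConstructor().newInstance() replaces the deprecated Class.newInstance(), while the initGlobals()/report wiring is left to the surrounding diagnoser, since its argument types come from there:

import org.apache.hadoop.vaidya.DiagnosticTest;

public class DiagnosticTestSketch {
  // Load a DiagnosticTest subclass by its configured class name.
  static DiagnosticTest instantiate(String className) throws Exception {
    Class<?> rc = Class.forName(className);
    return (DiagnosticTest) rc.getDeclaredConstructor().newInstance();
  }
  // The caller would then invoke initGlobals(...) and run() on the
  // instance, exactly as in the snippet above.
}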