Package org.apache.pig.impl

Examples of org.apache.pig.impl.PigContext.connect()
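The snippets below are drawn from the Pig test suite. As a minimal sketch of the common pattern (assuming ExecType.LOCAL and an empty Properties object, both purely illustrative), a context is constructed, connect() is called, and only then is the context used to build plans or read back its file system configuration:

        // Minimal sketch, not a verbatim test: create a context, connect it,
        // then use it (here, to read back the file system configuration, as in
        // the setUp() example further down).
        Properties props = new Properties();
        PigContext pc = new PigContext(ExecType.LOCAL, props);
        pc.connect();
        Configuration conf = ConfigurationUtil.toConfiguration(
                pc.getFs().getConfiguration());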


        String query = "A = LOAD '" + INPUT_FILE + "';" +
                       "B = LOAD '" + INPUT_FILE + "';" +
                       "C = join A by $0, B by $0 using 'merge' parallel 50;" +
                       "store C into 'out';";
  PigContext pc = new PigContext(ExecType.MAPREDUCE,cluster.getProperties());
    pc.connect();
  MROperPlan mro = Util.buildMRPlan(Util.buildPp(pigServer, query),pc);
        Assert.assertEquals(1,mro.getRoots().get(0).getRequestedParallelism());
    }

    @Test


    Configuration conf = null;

    @Before
    public void setUp() throws Exception {
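        // Connect a local-mode context and capture its file system
        // configuration for use in the tests.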
        PigContext pc = new PigContext(ExecType.LOCAL, new Properties());
        pc.connect();
        conf = new Configuration(
                ConfigurationUtil.toConfiguration(pc.getFs().getConfiguration())
                );
    }

     */
    @Test
    public void testGroupConstWithParallel() throws Throwable {
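        // Set a default parallelism of 100 before connecting, then build the
        // physical plan for a GROUP BY on a constant key.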
        PigContext pc = new PigContext(ExecType.MAPREDUCE, cluster.getProperties());
        pc.defaultParallel = 100;
        pc.connect();
       
        String query = "a = load 'input';\n" + "b = group a by 1;" + "store b into 'output';";
        PigServer pigServer = new PigServer( ExecType.MAPREDUCE, cluster.getProperties() );
        PhysicalPlan pp = Util.buildPp( pigServer, query );
       

     */
    @Test
    public void testGroupNonConstWithParallel() throws Throwable {
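        // Same setup, but grouping on a non-constant key ($0); defaultParallel
        // is again set to 100 before connect().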
        PigContext pc = new PigContext(ExecType.MAPREDUCE, cluster.getProperties());
        pc.defaultParallel = 100;
        pc.connect();
       
        PigServer pigServer = new PigServer( ExecType.MAPREDUCE, cluster.getProperties() );
        String query =  "a = load 'input';\n" + "b = group a by $0;" + "store b into 'output';";
       
        PhysicalPlan pp = Util.buildPp( pigServer, query );

    @Test
    public void testNonCollectableLoader() throws Exception{
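        // GROUP ... USING 'collected' requires a loader that implements
        // CollectableLoadFunc, so compiling this plan is expected to fail.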
        String query = "A = LOAD '" + INPUT_FILE + "' as (id, name, grade);" +
                       "B = group A by id using 'collected';";
        PigContext pc = new PigContext(ExecType.MAPREDUCE,cluster.getProperties());
        pc.connect();
        try {
            Util.buildMRPlan(Util.buildPp(pigServer, query), pc);
            Assert.fail("Must throw MRCompiler Exception");
        } catch (Exception e) {
            Assert.assertTrue(e instanceof MRCompilerException);

    final String testUDFFileName = className+".class";

    // JobControlCompiler setup
    PigServer pigServer = new PigServer(ExecType.MAPREDUCE);
    PigContext pigContext = pigServer.getPigContext();
    pigContext.connect();
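    // Register the jar containing the test UDF with the context before the job is compiled.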
    pigContext.addJar(tmpFile.getAbsolutePath());
    JobControlCompiler jobControlCompiler = new JobControlCompiler(pigContext, CONF);
    MROperPlan plan = new MROperPlan();
    MapReduceOper mro = new MapReduceOper(new OperatorKey());
    mro.UDFs = new HashSet<String>();

        zipArchives.add(textFile);
        final List<File> tarArchives = createFiles(".tgz", ".tar.gz", ".tar");

        final PigServer pigServer = new PigServer(ExecType.MAPREDUCE);
        final PigContext pigContext = pigServer.getPigContext();
        pigContext.connect();
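        // Point the streaming ship/cache properties at the archives created above.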
        pigContext.getProperties().put("pig.streaming.ship.files",
                StringUtils.join(zipArchives, ","));
        pigContext.getProperties().put("pig.streaming.cache.files",
                StringUtils.join(tarArchives, ","));
        final JobControlCompiler jobControlCompiler = new JobControlCompiler(

        Configuration conf = new Configuration(false);
        conf.set("foo", "bar");
        PigContext pigContext = new PigContext(ExecType.MAPREDUCE, conf);
        // This should fail, as Pig expects the Hadoop configs to be present
        // on the classpath.
        pigContext.connect();
    }
   
    @Test
    public void testJobConfGenerationWithUserConfigs() throws ExecException {
        Configuration conf = new Configuration(false);

        // Use the user-supplied configs below and not the classpath.
        conf.set("pig.use.overriden.hadoop.configs", "true");
        conf.set("mapred.job.tracker", "host:12345");
        conf.set("apache", "pig");
        PigContext pigContext = new PigContext(ExecType.MAPREDUCE, conf);
        pigContext.connect();
        JobConf jc = ((MRExecutionEngine)pigContext.getExecutionEngine()).getJobConf();
        Assert.assertEquals(jc.get("mapred.job.tracker"), "host:12345");
        Assert.assertEquals(jc.get("apache"), "pig");
    }
}

        private void setUpHashTable() throws IOException {
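            // Read the replicated input through POLoad to populate the
            // in-memory hash table.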
            FileSpec replFile = new FileSpec(repl, new FuncSpec(PigStorage.class.getName() + "()"));
            POLoad ld = new POLoad(new OperatorKey("Repl File Loader", 1L), replFile);
            PigContext pc = new PigContext(ExecType.MAPREDUCE, PigMapReduce.sJobConfInternal.get());
            pc.connect();

            ld.setPc(pc);
            Tuple dummyTuple = null;
            for (Result res = ld.getNextTuple(); res.returnStatus != POStatus.STATUS_EOP; res = ld
                    .getNextTuple()) {
