Package: com.google.appengine.tools.mapreduce.impl.shardedjob

Examples of com.google.appengine.tools.mapreduce.impl.shardedjob.ShardedJobSettings$Builder


  @Test
  public void testCleanupJob() throws Exception {
    ShardedJobService jobService = ShardedJobServiceFactory.getShardedJobService();
    assertTrue(jobService.cleanupJob("testCleanupJob")); // No such job yet
    ShardedJobSettings settings = new ShardedJobSettings.Builder().build();
    ShardedJobController<TestTask> controller = new DummyWorkerController();
    byte[] bytes = new byte[1024 * 1024];
    new Random().nextBytes(bytes);
    TestTask s1 = new TestTask(0, 2, 2, 2, bytes);
    TestTask s2 = new TestTask(1, 2, 2, 1);
View Full Code Here


  // Tests that an job that has just been initialized returns a reasonable job detail.
  @Test
  public void testGetJobDetail_empty() throws Exception {
    ShardedJobService jobService = ShardedJobServiceFactory.getShardedJobService();
    ShardedJobSettings settings = new ShardedJobSettings.Builder().build();
    ShardedJobController<TestTask> controller = new DummyWorkerController();
    jobService.startJob("testGetJobDetail_empty", ImmutableList.<TestTask>of(), controller,
        settings);

    JSONObject result = StatusHandler.handleGetJobDetail("testGetJobDetail_empty");
View Full Code Here

  // Tests that a populated job (with a couple of shards) generates a reasonable job detail.
  @Test
  public void testGetJobDetail_populated() throws Exception {
    ShardedJobService jobService = ShardedJobServiceFactory.getShardedJobService();
    ShardedJobSettings settings = new ShardedJobSettings.Builder().build();
    ShardedJobController<TestTask> controller = new DummyWorkerController();
    TestTask s1 = new TestTask(0, 2, 2, 2);
    TestTask s2 = new TestTask(1, 2, 2, 1);
    jobService.startJob("testGetJobDetail_populated", ImmutableList.of(s1, s2), controller,
        settings);
View Full Code Here

  }

  public void testMakeShardedJobSettings() {
    Key key = KeyFactory.createKey("Kind1", "value1");
    MapSettings settings = new MapSettings.Builder().setWorkerQueueName("good-queue").build();
    ShardedJobSettings sjSettings = settings.toShardedJobSettings("job1", key);
    assertNull(sjSettings.getBackend());
    assertEquals("default", sjSettings.getModule());
    assertEquals("1", sjSettings.getVersion());
    assertEquals("1.default.test.localhost", sjSettings.getTaskQueueTarget());
    assertEquals(settings.getWorkerQueueName(), sjSettings.getQueueName());
    assertEquals(getPath(settings, "job1", CONTROLLER_PATH), sjSettings.getControllerPath());
    assertEquals(getPath(settings, "job1", WORKER_PATH), sjSettings.getWorkerPath());
    assertEquals(makeViewerUrl(key, key), sjSettings.getPipelineStatusUrl());
    assertEquals(settings.getMaxShardRetries(), sjSettings.getMaxShardRetries());
    assertEquals(settings.getMaxSliceRetries(), sjSettings.getMaxSliceRetries());

    settings = new MapSettings.Builder(settings).setModule(null).setBackend("b1").build();
    sjSettings = settings.toShardedJobSettings("job1", key);
    assertEquals("backend-hostname", sjSettings.getTaskQueueTarget());
    assertEquals("b1", sjSettings.getBackend());
    assertNull(sjSettings.getModule());
    assertNull(sjSettings.getVersion());

    settings = new MapSettings.Builder(settings).setBackend(null).setModule("module1").build();
    sjSettings = settings.toShardedJobSettings("job1", key);
    assertNull(sjSettings.getBackend());
    assertEquals("module1", sjSettings.getModule());
    assertEquals("v1", sjSettings.getVersion());

    settings = new MapSettings.Builder(settings).setModule("default").build();
    Environment env = ApiProxy.getCurrentEnvironment();
    Environment mockEnv = EasyMock.createNiceMock(Environment.class);
    EasyMock.expect(mockEnv.getModuleId()).andReturn("default").atLeastOnce();
    EasyMock.expect(mockEnv.getVersionId()).andReturn("2").atLeastOnce();
    EasyMock.expect(mockEnv.getAttributes()).andReturn(env.getAttributes()).anyTimes();
    EasyMock.replay(mockEnv);
    ApiProxy.setEnvironmentForCurrentThread(mockEnv);
    // Test when current module is the same as requested module
    try {
      sjSettings = settings.toShardedJobSettings("job1", key);
      assertNull(sjSettings.getBackend());
      assertEquals("default", sjSettings.getModule());
      assertEquals("2", sjSettings.getVersion());
    } finally {
      ApiProxy.setEnvironmentForCurrentThread(env);
    }
    EasyMock.verify(mockEnv);
  }
View Full Code Here

        ImmutableList.builder();
    for (int i = 0; i < readers.size(); i++) {
      mapTasks.add(new MapOnlyShardTask<>(jobId, i, readers.size(), readers.get(i),
          specification.getMapper(), writers.get(i), settings.getMillisPerSlice()));
    }
    ShardedJobSettings shardedJobSettings = settings.toShardedJobSettings(jobId, getPipelineKey());
    PromisedValue<ResultAndStatus<R>> resultAndStatus = newPromise();
    WorkerController<I, O, R, MapOnlyMapperContext<O>> workerController = new WorkerController<>(
        jobId, new CountersImpl(), output, resultAndStatus.getHandle());
    ShardedJob<?> shardedJob =
        new ShardedJob<>(jobId, mapTasks.build(), workerController, shardedJobSettings);
    FutureValue<Void> shardedJobResult = futureCall(shardedJob, settings.toJobSettings());
    JobSetting[] jobSetting = settings.toJobSettings(waitFor(shardedJobResult),
            statusConsoleUrl(shardedJobSettings.getMapReduceStatusUrl()), maxAttempts(1));
    return futureCall(new ExamineStatusAndReturnResult<R>(jobId), resultAndStatus, jobSetting);
  }
View Full Code Here

            readers.get(i),
            new SortWorker(settings.getMaxSortMemory(), settings.getSortBatchPerEmitBytes()),
            writers.get(i),
            settings.getSortReadTimeMillis()));
      }
      ShardedJobSettings shardedJobSettings =
          settings.toShardedJobSettings(shardedJobId, getPipelineKey());

      PromisedValue<ResultAndStatus<FilesByShard>> resultAndStatus = newPromise();
      WorkerController<KeyValue<ByteBuffer, ByteBuffer>, KeyValue<ByteBuffer, List<ByteBuffer>>,
          FilesByShard, SortContext> workerController = new WorkerController<>(mrJobId,
          mapResult.getCounters(), output, resultAndStatus.getHandle());

      ShardedJob<?> shardedJob =
          new ShardedJob<>(shardedJobId, sortTasks.build(), workerController, shardedJobSettings);
      FutureValue<Void> shardedJobResult = futureCall(shardedJob, settings.toJobSettings());

      return futureCall(new ExamineStatusAndReturnResult<FilesByShard>(shardedJobId),
          resultAndStatus, settings.toJobSettings(waitFor(shardedJobResult),
              statusConsoleUrl(shardedJobSettings.getMapReduceStatusUrl())));
    }
View Full Code Here

            readers.size(),
            readers.get(i),
            writers.get(i),
            settings.getSortReadTimeMillis()));
      }
      ShardedJobSettings shardedJobSettings =
          settings.toShardedJobSettings(shardedJobId, getPipelineKey());

      PromisedValue<ResultAndStatus<FilesByShard>> resultAndStatus = newPromise();
      WorkerController<KeyValue<ByteBuffer, Iterator<ByteBuffer>>,
          KeyValue<ByteBuffer, List<ByteBuffer>>, FilesByShard, MergeContext> workerController =
          new WorkerController<>(mrJobId, priorResult.getCounters(), output,
              resultAndStatus.getHandle());
      ShardedJob<?> shardedJob =
          new ShardedJob<>(shardedJobId, mergeTasks.build(), workerController, shardedJobSettings);
      FutureValue<Void> shardedJobResult = futureCall(shardedJob, settings.toJobSettings());

      FutureValue<MapReduceResult<FilesByShard>> finished = futureCall(
          new ExamineStatusAndReturnResult<FilesByShard>(shardedJobId),
          resultAndStatus, settings.toJobSettings(waitFor(shardedJobResult),
              statusConsoleUrl(shardedJobSettings.getMapReduceStatusUrl())));
      futureCall(new Cleanup(settings), immediate(priorResult), waitFor(finished));
      return futureCall(new MergeJob(mrJobId, mrSpec, settings, tier + 1), finished,
          settings.toJobSettings(maxAttempts(1)));
    }
View Full Code Here

          reduceTasks = ImmutableList.builder();
      for (int i = 0; i < readers.size(); i++) {
        reduceTasks.add(new ReduceShardTask<>(mrJobId, i, readers.size(), readers.get(i),
            mrSpec.getReducer(), writers.get(i), settings.getMillisPerSlice()));
      }
      ShardedJobSettings shardedJobSettings =
          settings.toShardedJobSettings(shardedJobId, getPipelineKey());
      PromisedValue<ResultAndStatus<R>> resultAndStatus = newPromise();
      WorkerController<KeyValue<K, Iterator<V>>, O, R, ReducerContext<O>> workerController =
          new WorkerController<>(mrJobId, mergeResult.getCounters(), output,
              resultAndStatus.getHandle());
      ShardedJob<?> shardedJob =
          new ShardedJob<>(shardedJobId, reduceTasks.build(), workerController, shardedJobSettings);
      FutureValue<Void> shardedJobResult = futureCall(shardedJob, settings.toJobSettings());
      return futureCall(new ExamineStatusAndReturnResult<R>(shardedJobId), resultAndStatus,
          settings.toJobSettings(waitFor(shardedJobResult),
              statusConsoleUrl(shardedJobSettings.getMapReduceStatusUrl())));
    }
View Full Code Here

          ImmutableList.builder();
      for (int i = 0; i < readers.size(); i++) {
        mapTasks.add(new MapShardTask<>(mrJobId, i, readers.size(), readers.get(i),
            mrSpec.getMapper(), writers.get(i), settings.getMillisPerSlice()));
      }
      ShardedJobSettings shardedJobSettings =
          settings.toShardedJobSettings(shardedJobId, getPipelineKey());

      PromisedValue<ResultAndStatus<FilesByShard>> resultAndStatus = newPromise();
      WorkerController<I, KeyValue<K, V>, FilesByShard, MapperContext<K, V>> workerController =
          new WorkerController<>(mrJobId, new CountersImpl(), output, resultAndStatus.getHandle());
      ShardedJob<?> shardedJob =
          new ShardedJob<>(shardedJobId, mapTasks.build(), workerController, shardedJobSettings);
      FutureValue<Void> shardedJobResult = futureCall(shardedJob, settings.toJobSettings());
      return futureCall(new ExamineStatusAndReturnResult<FilesByShard>(shardedJobId),
          resultAndStatus, settings.toJobSettings(waitFor(shardedJobResult),
              statusConsoleUrl(shardedJobSettings.getMapReduceStatusUrl())));
    }
View Full Code Here

  @Override public String toString() {
    return getClass().getSimpleName() + "()";
  }

  private static ShardedJobSettings makeShardedJobSettings(MapReduceSettings mrSettings) {
    return new ShardedJobSettings()
        .setControllerPath(mrSettings.getBaseUrl() + MapReduceServletImpl.CONTROLLER_PATH)
        .setWorkerPath(mrSettings.getBaseUrl() + MapReduceServletImpl.WORKER_PATH)
        .setControllerBackend(mrSettings.getBackend())
        .setWorkerBackend(mrSettings.getBackend())
        .setControllerQueueName(mrSettings.getControllerQueueName())
View Full Code Here

TOP

Related Classes of com.google.appengine.tools.mapreduce.impl.shardedjob.ShardedJobSettings$Builder

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., which is owned by Oracle, Inc. Contact coftware#gmail.com.