Package org.apache.helix

Examples of org.apache.helix.PropertyKey
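The snippets below are drawn from Apache Helix's tests and task framework. A PropertyKey is a typed handle to a node in Helix's ZooKeeper layout; it is built with PropertyKey.Builder (obtained from a HelixDataAccessor) and resolved by passing it to the accessor's getProperty/setProperty/updateProperty methods. The following minimal sketch illustrates the common pattern; "manager" and "MyResource" are illustrative placeholders, not names taken from the snippets below.

      // Minimal sketch of the PropertyKey pattern (placeholders: manager, MyResource).
      HelixDataAccessor accessor = manager.getHelixDataAccessor();
      PropertyKey.Builder keyBuilder = accessor.keyBuilder();

      // Build a typed key for a resource's ideal state and read it back.
      PropertyKey idealStateKey = keyBuilder.idealStates("MyResource");
      IdealState idealState = accessor.getProperty(idealStateKey);
      if (idealState != null) {
        ZNRecord record = idealState.getRecord();
        // Inspect or modify the record, then write it back if needed:
        // accessor.setProperty(idealStateKey, idealState);
      }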


      HelixDataAccessor helixDataAccessor = manager.getHelixDataAccessor();
      Builder keyBuilder = helixDataAccessor.keyBuilder();

      // Poll (up to 100 x 200 ms) until the controller's status update for this
      // scheduler message contains a "Summary" field, i.e. the scheduled task has finished.
      for (int j = 0; j < 100; j++) {
        Thread.sleep(200);
        PropertyKey controllerTaskStatus =
            keyBuilder.controllerTaskStatus(MessageType.SCHEDULER_MSG.toString(), msgId);
        ZNRecord statusUpdate = helixDataAccessor.getProperty(controllerTaskStatus).getRecord();
        if (statusUpdate.getMapFields().containsKey("Summary")) {
          break;
        }
      }

      // Once finished, verify the controller recorded the expected number of sent messages.
      PropertyKey controllerTaskStatus =
          keyBuilder.controllerTaskStatus(MessageType.SCHEDULER_MSG.toString(), msgId);
      ZNRecord statusUpdate = helixDataAccessor.getProperty(controllerTaskStatus).getRecord();
      Assert.assertTrue(statusUpdate.getMapField("SentMessageCount").get("MessageCount")
          .equals("" + (_PARTITIONS * 3 / 5)));
      int messageResultCount = 0;
View Full Code Here


    }
    for (int i = 0; i < NODE_NR; i++) {
      String msgId = msgIds.get(i);
      for (int j = 0; j < 100; j++) {
        Thread.sleep(200);
        PropertyKey controllerTaskStatus =
            keyBuilder.controllerTaskStatus(MessageType.SCHEDULER_MSG.toString(), msgId);
        ZNRecord statusUpdate = helixDataAccessor.getProperty(controllerTaskStatus).getRecord();
        if (statusUpdate.getMapFields().containsKey("Summary")) {
          // System.err.println(msgId+" done");
          break;
        }
      }

      PropertyKey controllerTaskStatus =
          keyBuilder.controllerTaskStatus(MessageType.SCHEDULER_MSG.toString(), msgId);
      ZNRecord statusUpdate = helixDataAccessor.getProperty(controllerTaskStatus).getRecord();
      Assert.assertTrue(statusUpdate.getMapField("SentMessageCount").get("MessageCount")
          .equals("" + (_PARTITIONS * 3 / 5)));
      int messageResultCount = 0;
      for (String key : statusUpdate.getMapFields().keySet()) {
        if (key.startsWith("MessageResult")) {
          messageResultCount++;
        }
      }
      Assert.assertEquals(messageResultCount, _PARTITIONS * 3 / 5);
    }

    for (int j = 0; j < 100; j++) {
      Thread.sleep(200);
      PropertyKey controllerTaskStatus =
          keyBuilder.controllerTaskStatus(MessageType.SCHEDULER_MSG.toString(), msgIdPrime);
      ZNRecord statusUpdate = helixDataAccessor.getProperty(controllerTaskStatus).getRecord();
      if (statusUpdate.getMapFields().containsKey("Summary")) {
        break;
      }
View Full Code Here

    // The handler stores the id of the generated scheduler task in the message's result map.
    String msgId =
        callback._message.getResultMap()
            .get(DefaultSchedulerMessageHandlerFactory.SCHEDULER_MSG_ID);

    for (int j = 0; j < 10; j++) {
      Thread.sleep(200);
      PropertyKey controllerTaskStatus =
          keyBuilder.controllerTaskStatus(MessageType.SCHEDULER_MSG.toString(), msgId);
      ZNRecord statusUpdate = helixDataAccessor.getProperty(controllerTaskStatus).getRecord();
      if (statusUpdate.getMapFields().containsKey("SentMessageCount")) {
        Assert.assertEquals(
            statusUpdate.getMapFields().get("SentMessageCount").get("MessageCount"), ""
                + (_PARTITIONS * 3));
        break;
      }
    }

    // Messages are released in batches of five: wait for each batch to arrive,
    // assert the running count, then signal the factory so the handlers can proceed.
    for (int i = 0; i < _PARTITIONS * 3 / 5; i++) {
      for (int j = 0; j < 10; j++) {
        Thread.sleep(300);
        if (factory._messageCount == 5 * (i + 1))
          break;
      }
      Thread.sleep(300);
      Assert.assertEquals(factory._messageCount, 5 * (i + 1));
      factory.signal();
      // System.err.println(i);
    }

    for (int j = 0; j < 10; j++) {
      Thread.sleep(200);
      PropertyKey controllerTaskStatus =
          keyBuilder.controllerTaskStatus(MessageType.SCHEDULER_MSG.toString(), msgId);
      ZNRecord statusUpdate = helixDataAccessor.getProperty(controllerTaskStatus).getRecord();
      if (statusUpdate.getMapFields().containsKey("Summary")) {
        break;
      }
    }

    Assert.assertEquals(_PARTITIONS, factory._results.size());
    PropertyKey controllerTaskStatus =
        keyBuilder.controllerTaskStatus(MessageType.SCHEDULER_MSG.toString(), msgId);
    ZNRecord statusUpdate = helixDataAccessor.getProperty(controllerTaskStatus).getRecord();
    Assert.assertTrue(statusUpdate.getMapField("SentMessageCount").get("MessageCount")
        .equals("" + (_PARTITIONS * 3)));
    int messageResultCount = 0;
View Full Code Here

    // Drop the requested partitions from the task's ideal state and persist the change.
    if (!partitionsToDrop.isEmpty()) {
      for (Integer pId : partitionsToDrop) {
        taskIs.getRecord().getMapFields().remove(pName(resourceName, pId));
      }
      HelixDataAccessor accessor = _manager.getHelixDataAccessor();
      PropertyKey propertyKey = accessor.keyBuilder().idealStates(resourceName);
      accessor.setProperty(propertyKey, taskIs);
    }

    // Update rebalancer context, previous ideal state.
    TaskUtil.setJobContext(_manager, resourceName, jobCtx);
View Full Code Here

  private static void cleanup(HelixManager mgr, final String resourceName, WorkflowConfig cfg,
      String workflowResource) {
    HelixDataAccessor accessor = mgr.getHelixDataAccessor();

    // Remove any DAG references in workflow
    PropertyKey workflowKey = getConfigPropertyKey(accessor, workflowResource);
    DataUpdater<ZNRecord> dagRemover = new DataUpdater<ZNRecord>() {
      @Override
      public ZNRecord update(ZNRecord currentData) {
        JobDag jobDag = JobDag.fromJson(currentData.getSimpleField(WorkflowConfig.DAG));
        for (String child : jobDag.getDirectChildren(resourceName)) {
          jobDag.getChildrenToParents().get(child).remove(resourceName);
        }
        for (String parent : jobDag.getDirectParents(resourceName)) {
          jobDag.getParentsToChildren().get(parent).remove(resourceName);
        }
        jobDag.getChildrenToParents().remove(resourceName);
        jobDag.getParentsToChildren().remove(resourceName);
        jobDag.getAllNodes().remove(resourceName);
        try {
          currentData.setSimpleField(WorkflowConfig.DAG, jobDag.toJson());
        } catch (Exception e) {
          LOG.error("Could not update DAG for job " + resourceName, e);
        }
        return currentData;
      }
    };
    accessor.getBaseDataAccessor().update(workflowKey.getPath(), dagRemover,
        AccessOption.PERSISTENT);

    // Delete resource configs.
    PropertyKey cfgKey = getConfigPropertyKey(accessor, resourceName);
    if (!accessor.removeProperty(cfgKey)) {
      throw new RuntimeException(String.format(
          "Error occurred while trying to clean up job %s. Failed to remove node %s from Helix. Aborting further clean up steps.",
          resourceName, cfgKey));
    }
    // Delete property store information for this resource.
    String propStoreKey = getRebalancerPropStoreKey(resourceName);
    if (!mgr.getHelixPropertyStore().remove(propStoreKey, AccessOption.PERSISTENT)) {
      throw new RuntimeException(String.format(
          "Error occurred while trying to clean up job %s. Failed to remove node %s from Helix. Aborting further clean up steps.",
          resourceName, propStoreKey));
    }
    // Finally, delete the ideal state itself.
    PropertyKey isKey = getISPropertyKey(accessor, resourceName);
    if (!accessor.removeProperty(isKey)) {
      throw new RuntimeException(String.format(
          "Error occurred while trying to clean up task %s. Failed to remove node %s from Helix.",
          resourceName, isKey));
    }
    LOG.info(String.format("Successfully cleaned up job resource %s.", resourceName));

    boolean lastInWorkflow = true;
    for (String job : cfg.getJobDag().getAllNodes()) {
      // check if property store information or resource configs exist for this job
      if (mgr.getHelixPropertyStore().exists(getRebalancerPropStoreKey(job),
          AccessOption.PERSISTENT)
          || accessor.getProperty(getConfigPropertyKey(accessor, job)) != null
          || accessor.getProperty(getISPropertyKey(accessor, job)) != null) {
        lastInWorkflow = false;
      }
    }

    // clean up workflow-level info if this was the last in workflow
    if (lastInWorkflow && cfg.isTerminable()) {
      // delete workflow config
      PropertyKey workflowCfgKey = getConfigPropertyKey(accessor, workflowResource);
      if (!accessor.removeProperty(workflowCfgKey)) {
        throw new RuntimeException(
            String
                .format(
                    "Error occurred while trying to clean up workflow %s. Failed to remove node %s from Helix. Aborting further clean up steps.",
View Full Code Here

      String sessionId, String resource, String partition, TaskPartitionState state) {
    LOG.debug(String.format("Requesting a state transition to %s for partition %s.", state,
        partition));
    try {
      PropertyKey.Builder keyBuilder = accessor.keyBuilder();
      PropertyKey key = keyBuilder.currentState(instance, sessionId, resource);
      CurrentState currStateDelta = new CurrentState(resource);
      currStateDelta.setRequestedState(partition, state.name());

      return accessor.updateProperty(key, currStateDelta);
    } catch (Exception e) {
View Full Code Here

    {
      String instance = _manager.getInstanceName();
      String sessionId = _message.getTgtSessionId();
      String resource = _message.getResourceName();
      ZNRecordBucketizer bucketizer = new ZNRecordBucketizer(_message.getBucketSize());
      PropertyKey key = accessor.keyBuilder().currentState(instance,
                                                           sessionId,
                                                           resource,
                                                           bucketizer.getBucketName(partitionName));
      ZNRecord rec = new ZNRecord(resource);
      Map<String, String> map = new TreeMap<String, String>();
View Full Code Here

      }
    }

    try {
      // Update the ZK current state of the node
      PropertyKey key =
          keyBuilder.currentState(instanceName, sessionId, resource,
              bucketizer.getBucketName(partitionKey));
      if (_message.getAttribute(Attributes.PARENT_MSG_ID) == null) {
        // normal message
        accessor.updateProperty(key, _currentStateDelta);
View Full Code Here

      }
    }
    List<CurrentState> currentStates = accessor.getProperty(currentStateKeys);
    Iterator<PropertyKey> csKeyIter = currentStateKeys.iterator();
    for (CurrentState currentState : currentStates) {
      PropertyKey key = csKeyIter.next();
      String[] params = key.getParams();
      if (currentState != null && params.length >= 4) {
        Map<String, Map<String, CurrentState>> instanceCurStateMap = allCurStateMap.get(params[1]);
        Map<String, CurrentState> sessionCurStateMap = instanceCurStateMap.get(params[2]);
        sessionCurStateMap.put(params[3], currentState);
      }
View Full Code Here

      }
    }

    if (controllerMsgUpdates.size() > 0) {
      for (String controllerMsgId : controllerMsgUpdates.keySet()) {
        PropertyKey controllerStatusUpdateKey =
            keyBuilder.controllerTaskStatus(MessageType.SCHEDULER_MSG.toString(), controllerMsgId);
        StatusUpdate controllerStatusUpdate = accessor.getProperty(controllerStatusUpdateKey);
        for (String taskPartitionName : controllerMsgUpdates.get(controllerMsgId).keySet()) {
          Map<String, String> result = new HashMap<String, String>();
          result.put("Result", controllerMsgUpdates.get(controllerMsgId).get(taskPartitionName));
View Full Code Here
