Examples of CacheDirectiveInfo


Examples of org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo

    for (Entry<Long, CacheDirective> cur : tailMap.entrySet()) {
      if (numReplies >= maxListCacheDirectivesNumResponses) {
        return new BatchedListEntries<CacheDirectiveEntry>(replies, true);
      }
      CacheDirective curDirective = cur.getValue();
      CacheDirectiveInfo info = cur.getValue().toInfo();

      // If the requested ID is present, it should be the first item.
      // Hitting this case means the ID is not present, or we're on the second
      // item and should break out.
      if (id != null &&
          !(info.getId().equals(id))) {
        break;
      }
      if (filter.getPool() != null &&
          !info.getPool().equals(filter.getPool())) {
        continue;
      }
      if (filterPath != null &&
          !info.getPath().toUri().getPath().equals(filterPath)) {
        continue;
      }
      boolean hasPermission = true;
      if (pc != null) {
        try {
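The loop above runs inside the NameNode's CacheManager; from a client, the same filtering is driven through DistributedFileSystem.listCacheDirectives. Below is a minimal client-side sketch, assuming a running HDFS cluster reachable via fs.defaultFS and a pre-existing cache pool named "pool1" (both the pool name and the path are assumptions for illustration, not part of the snippet above).

import java.util.EnumSet;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CacheFlag;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;

public class ListCacheDirectivesSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Assumes fs.defaultFS points at an HDFS cluster.
    DistributedFileSystem dfs =
        (DistributedFileSystem) new Path("/").getFileSystem(conf);

    // Add a directive so there is something to list ("pool1" is assumed to exist).
    long id = dfs.addCacheDirective(
        new CacheDirectiveInfo.Builder()
            .setPool("pool1")
            .setPath(new Path("/cached/data"))
            .setReplication((short) 2)
            .build(),
        EnumSet.of(CacheFlag.FORCE));
    System.out.println("Added directive " + id);

    // List directives matching a pool filter; the NameNode applies the same
    // pool/path checks shown in the loop above.
    RemoteIterator<CacheDirectiveEntry> it = dfs.listCacheDirectives(
        new CacheDirectiveInfo.Builder().setPool("pool1").build());
    while (it.hasNext()) {
      CacheDirectiveInfo info = it.next().getInfo();
      System.out.println(info.getId() + "\t" + info.getPool() + "\t" + info.getPath());
    }
  }
}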

Examples of org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo

      pools.add(b.build());
    }

    for (CacheDirective directive : directivesById.values()) {
      CacheDirectiveInfo info = directive.toInfo();
      CacheDirectiveInfoProto.Builder b = CacheDirectiveInfoProto.newBuilder()
          .setId(info.getId());

      if (info.getPath() != null) {
        b.setPath(info.getPath().toUri().getPath());
      }

      if (info.getReplication() != null) {
        b.setReplication(info.getReplication());
      }

      if (info.getPool() != null) {
        b.setPool(info.getPool());
      }

      Expiration expiry = info.getExpiration();
      if (expiry != null) {
        assert (!expiry.isRelative());
        b.setExpiration(PBHelper.convert(expiry));
      }
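The assert above only admits absolute expirations, which is consistent with relative expirations being resolved to absolute times on the NameNode before they are persisted. Below is a small sketch of both forms of CacheDirectiveInfo.Expiration; the pool name and path are placeholders.

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo.Expiration;

public class ExpirationSketch {
  // Expires one hour after the directive is added; relative to creation time.
  static CacheDirectiveInfo relativeExpiry() {
    return new CacheDirectiveInfo.Builder()
        .setPool("pool1")
        .setPath(new Path("/cached/data"))
        .setExpiration(Expiration.newRelative(60L * 60 * 1000))
        .build();
  }

  // Expires at a fixed wall-clock time.
  static CacheDirectiveInfo absoluteExpiry() {
    return new CacheDirectiveInfo.Builder()
        .setPool("pool1")
        .setPath(new Path("/cached/data"))
        .setExpiration(Expiration.newAbsolute(
            System.currentTimeMillis() + 60L * 60 * 1000))
        .build();
  }
}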

Examples of org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo

    final int poolCount = 7;
    HashSet<String> poolNames = new HashSet<String>(poolCount);
    Path path = new Path("/p");
    for (int i=0; i<poolCount; i++) {
      String poolName = "testListCacheDirectives-" + i;
      CacheDirectiveInfo directiveInfo =
        new CacheDirectiveInfo.Builder().setPool(poolName).setPath(path).build();
      dfs.addCachePool(new CachePoolInfo(poolName));
      dfs.addCacheDirective(directiveInfo, EnumSet.of(CacheFlag.FORCE));
      poolNames.add(poolName);
    }

Examples of org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo

                new CacheDirectiveInfo.Builder().
                    setPool(directive.getPool()).
                    setPath(directive.getPath()).
                    build());
        while (iter.hasNext()) {
          CacheDirectiveInfo result = iter.next().getInfo();
          if ((result.getId() == id) &&
              (result.getReplication().shortValue() == newReplication)) {
            return true;
          }
        }
        Thread.sleep(1000);
      }
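The polling loop above waits for a replication change to become visible through listCacheDirectives. A hedged sketch of the kind of call that triggers such a change is shown below; the class and method names are invented for illustration, and dfs, id, and newReplication correspond to the values used in the snippet.

import java.io.IOException;

import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;

public class ModifyDirectiveSketch {
  // Changes the replication of an existing directive, identified by its id.
  // Only the fields set on the builder are modified.
  static void bumpReplication(DistributedFileSystem dfs, long id,
      short newReplication) throws IOException {
    dfs.modifyCacheDirective(
        new CacheDirectiveInfo.Builder()
            .setId(id)
            .setReplication(newReplication)
            .build());
  }
}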

Examples of org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo

        System.err.println("Can't understand argument: " + args.get(0));
        return 1;
      }
       
      DistributedFileSystem dfs = getDFS(conf);
      CacheDirectiveInfo directive = builder.build();
      EnumSet<CacheFlag> flags = EnumSet.noneOf(CacheFlag.class);
      if (force) {
        flags.add(CacheFlag.FORCE);
      }
      try {

Examples of org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo

        RemoteIterator<CacheDirectiveEntry> iter =
            dfs.listCacheDirectives(builder.build());
        int numEntries = 0;
        while (iter.hasNext()) {
          CacheDirectiveEntry entry = iter.next();
          CacheDirectiveInfo directive = entry.getInfo();
          CacheDirectiveStats stats = entry.getStats();
          List<String> row = new LinkedList<String>();
          row.add("" + directive.getId());
          row.add(directive.getPool());
          row.add("" + directive.getReplication());
          String expiry;
          // This is effectively never, round for nice printing
          if (directive.getExpiration().getMillis() >
              Expiration.MAX_RELATIVE_EXPIRY_MS / 2) {
            expiry = "never";
          } else {
            expiry = directive.getExpiration().toString();
          }
          row.add(expiry);
          row.add(directive.getPath().toUri().getPath());
          if (printStats) {
            row.add("" + stats.getBytesNeeded());
            row.add("" + stats.getBytesCached());
            row.add("" + stats.getFilesNeeded());
            row.add("" + stats.getFilesCached());

Examples of org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo

      public CacheDirectiveEntry next() throws IOException {
        // Although the paths we get back from the NameNode should always be
        // absolute, we call makeQualified to add the scheme and authority of
        // this DistributedFilesystem.
        CacheDirectiveEntry desc = iter.next();
        CacheDirectiveInfo info = desc.getInfo();
        Path p = info.getPath().makeQualified(getUri(), getWorkingDirectory());
        return new CacheDirectiveEntry(
            new CacheDirectiveInfo.Builder(info).setPath(p).build(),
            desc.getStats());
      }
    };
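The iterator above rewrites each returned path with makeQualified, so callers see fully qualified hdfs:// URIs instead of the scheme-less absolute paths the NameNode returns. A standalone sketch of that effect follows; the NameNode URI is a placeholder.

import java.net.URI;

import org.apache.hadoop.fs.Path;

public class MakeQualifiedSketch {
  public static void main(String[] args) {
    // Scheme-less absolute path, as returned by the NameNode.
    Path fromNameNode = new Path("/cached/data");
    // Add the filesystem's scheme and authority, as the iterator above does
    // with getUri() and getWorkingDirectory().
    Path qualified = fromNameNode.makeQualified(
        URI.create("hdfs://namenode.example.com:8020"), new Path("/"));
    System.out.println(qualified);  // hdfs://namenode.example.com:8020/cached/data
  }
}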

Examples of org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo

  @Override
  public ListCacheDirectivesResponseProto listCacheDirectives(
      RpcController controller, ListCacheDirectivesRequestProto request)
          throws ServiceException {
    try {
      CacheDirectiveInfo filter =
          PBHelper.convert(request.getFilter());
      BatchedEntries<CacheDirectiveEntry> entries =
        server.listCacheDirectives(request.getPrevId(), filter);
      ListCacheDirectivesResponseProto.Builder builder =
          ListCacheDirectivesResponseProto.newBuilder();

Examples of org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo

    builder.setStats(PBHelper.convert(entry.getStats()));
    return builder.build();
  }
 
  public static CacheDirectiveEntry convert(CacheDirectiveEntryProto proto) {
    CacheDirectiveInfo info = PBHelper.convert(proto.getInfo());
    CacheDirectiveStats stats = PBHelper.convert(proto.getStats());
    return new CacheDirectiveEntry(info, stats);
  }
