Examples of GetHdfsBlockLocationsResponseProto


Examples of org.apache.hadoop.hdfs.protocol.proto.ClientDatanodeProtocolProtos.GetHdfsBlockLocationsResponseProto

        GetHdfsBlockLocationsRequestProto.newBuilder()
        .addAllBlocks(blocksProtos)
        .addAllTokens(tokensProtos)
        .build();
    // Send the RPC
    GetHdfsBlockLocationsResponseProto response;
    try {
      response = rpcProxy.getHdfsBlockLocations(NULL_CONTROLLER, request);
    } catch (ServiceException e) {
      throw ProtobufHelper.getRemoteException(e);
    }
    // List of volumes in the response
    List<ByteString> volumeIdsByteStrings = response.getVolumeIdsList();
    List<byte[]> volumeIds = new ArrayList<byte[]>(volumeIdsByteStrings.size());
    for (ByteString bs : volumeIdsByteStrings) {
      volumeIds.add(bs.toByteArray());
    }
    // Array of indexes into the list of volumes, one per block
    List<Integer> volumeIndexes = response.getVolumeIndexesList();
    // Parsed HdfsVolumeId values, one per block
    return new HdfsBlocksMetadata(blocks.toArray(new ExtendedBlock[] {}),
        volumeIds, volumeIndexes);
  }
View Full Code Here

Examples of org.apache.hadoop.hdfs.protocol.proto.ClientDatanodeProtocolProtos.GetHdfsBlockLocationsResponseProto

        GetHdfsBlockLocationsRequestProto.newBuilder()
        .addAllBlocks(blocksProtos)
        .addAllTokens(tokensProtos)
        .build();
    // Send the RPC
    GetHdfsBlockLocationsResponseProto response;
    try {
      response = rpcProxy.getHdfsBlockLocations(NULL_CONTROLLER, request);
    } catch (ServiceException e) {
      throw ProtobufHelper.getRemoteException(e);
    }
    // List of volumes in the response
    List<ByteString> volumeIdsByteStrings = response.getVolumeIdsList();
    List<byte[]> volumeIds = new ArrayList<byte[]>(volumeIdsByteStrings.size());
    for (ByteString bs : volumeIdsByteStrings) {
      volumeIds.add(bs.toByteArray());
    }
    // Array of indexes into the list of volumes, one per block
    List<Integer> volumeIndexes = response.getVolumeIndexesList();
    // Parsed HdfsVolumeId values, one per block
    return new HdfsBlocksMetadata(blocks.toArray(new ExtendedBlock[] {}),
        volumeIds, volumeIndexes);
  }
View Full Code Here

Examples of org.apache.hadoop.hdfs.protocol.proto.ClientDatanodeProtocolProtos.GetHdfsBlockLocationsResponseProto

        .setBlockPoolId(blockPoolId)
        .addAllBlockIds(Longs.asList(blockIds))
        .addAllTokens(tokensProtos)
        .build();
    // Send the RPC
    GetHdfsBlockLocationsResponseProto response;
    try {
      response = rpcProxy.getHdfsBlockLocations(NULL_CONTROLLER, request);
    } catch (ServiceException e) {
      throw ProtobufHelper.getRemoteException(e);
    }
    // List of volumes in the response
    List<ByteString> volumeIdsByteStrings = response.getVolumeIdsList();
    List<byte[]> volumeIds = new ArrayList<byte[]>(volumeIdsByteStrings.size());
    for (ByteString bs : volumeIdsByteStrings) {
      volumeIds.add(bs.toByteArray());
    }
    // Array of indexes into the list of volumes, one per block
    List<Integer> volumeIndexes = response.getVolumeIndexesList();
    // Parsed HdfsVolumeId values, one per block
    return new HdfsBlocksMetadata(blockPoolId, blockIds,
        volumeIds, volumeIndexes);
  }
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., now owned by Oracle Inc. Contact coftware#gmail.com.