Package org.apache.hadoop.hdfs.protocol

Examples of org.apache.hadoop.hdfs.protocol.DirectoryListing
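
DirectoryListing is the batched response type for the HDFS directory-listing
RPC. Large directories come back in pages: getPartialListing() holds the
current batch of HdfsFileStatus entries, hasMore() reports whether another
batch exists, getLastName() is the cursor to pass as startAfter on the next
call, and getRemainingEntries() estimates how many entries are still
outstanding. Every example below is a variation of the same pagination loop.

  // A minimal sketch of the idiom. `listPaths` stands in for whichever
  // call returns a DirectoryListing (e.g. DFSClient#listPaths), and
  // `process` is a hypothetical per-entry callback.
  void listAll(String src) throws IOException {
    byte[] startAfter = HdfsFileStatus.EMPTY_NAME; // empty name: start at the beginning
    DirectoryListing batch;
    do {
      batch = listPaths(src, startAfter);
      if (batch == null) {
        // the callers below all treat null as "directory no longer exists"
        throw new FileNotFoundException("File " + src + " does not exist.");
      }
      for (HdfsFileStatus status : batch.getPartialListing()) {
        process(status);
      }
      startAfter = batch.getLastName(); // cursor for the next batch
    } while (batch.hasMore());
  }

The first excerpt, from the fsck implementation (NamenodeFsck), uses this
loop to walk a directory while checking each child: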


    String path = file.getFullName(parent);
    boolean isOpen = false;

    if (file.isDir()) {
      byte[] lastReturnedName = HdfsFileStatus.EMPTY_NAME;
      DirectoryListing thisListing;
      if (showFiles) {
        out.println(path + " <dir>");
      }
      res.totalDirs++;
      // page through the directory: one getListing RPC per batch
      do {
        assert lastReturnedName != null;
        thisListing = namenode.getRpcServer().getListing(
            path, lastReturnedName, false);
        if (thisListing == null) {
          return;
        }
        HdfsFileStatus[] files = thisListing.getPartialListing();
        for (int i = 0; i < files.length; i++) {
          check(path, files[i], res);
        }
        lastReturnedName = thisListing.getLastName();
      } while (thisListing.hasMore());
      return;
    }
    long fileLen = file.getLen();
    // Get block locations without updating the file access time
    // and without block access tokens.
    // ... (per-block checks on the file elided)
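On the server side, the NameNode's RPC entry point (NameNodeRpcServer) is a
thin wrapper that delegates to the namesystem and updates listing metrics: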



  @Override // ClientProtocol
  public DirectoryListing getListing(String src, byte[] startAfter,
      boolean needLocation) throws IOException {
    DirectoryListing files = namesystem.getListing(
        src, startAfter, needLocation);
    if (files != null) {
      metrics.incrGetListingOps();
      metrics.incrFilesInGetListingOps(files.getPartialListing().length);
    }
    return files;
  }
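WebHDFS (NamenodeWebHdfsMethods) maps a null listing to a
FileNotFoundException, which the HTTP layer can surface as a 404: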


  private static DirectoryListing getDirectoryListing(final NamenodeProtocols np,
      final String p, byte[] startAfter) throws IOException {
    final DirectoryListing listing = np.getListing(p, startAfter, false);
    if (listing == null) { // the directory does not exist
      throw new FileNotFoundException("File " + p + " does not exist.");
    }
    return listing;
  }
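The same WebHDFS code streams a large listing out as JSON one batch at a
time instead of materializing the whole directory in memory: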

 
  private static StreamingOutput getListingStream(final NamenodeProtocols np,
      final String p) throws IOException {
    final DirectoryListing first = getDirectoryListing(np, p,
        HdfsFileStatus.EMPTY_NAME);

    return new StreamingOutput() {
      @Override
      public void write(final OutputStream outstream) throws IOException {
        final PrintStream out = new PrintStream(outstream);
        out.println("{\"" + FileStatus.class.getSimpleName() + "es\":{\""
            + FileStatus.class.getSimpleName() + "\":[");

        final HdfsFileStatus[] partial = first.getPartialListing();
        if (partial.length > 0) {
          out.print(JsonUtil.toJsonString(partial[0], false));
        }
        for(int i = 1; i < partial.length; i++) {
          out.println(',');
          out.print(JsonUtil.toJsonString(partial[i], false));
        }
        // ... (remaining batches are fetched and streamed the same way
        // before the JSON document is closed)
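Inside the NameNode, FSNamesystem.getListing performs the read under the
namesystem read lock: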

  /**
   * Get a partial listing of the indicated directory.
   *
   * @param src the directory name
   * @param startAfter the name to start listing after
   * @param needLocation if block locations need to be returned
   * @return a partial listing starting after startAfter
   * @throws AccessControlException if access is denied
   * @throws UnresolvedLinkException if a symlink is encountered
   * @throws IOException if other I/O error occurred
   */
  DirectoryListing getListing(String src, byte[] startAfter,
      boolean needLocation)
    throws AccessControlException, UnresolvedLinkException, IOException {
    DirectoryListing dl;
    readLock();
    try {
      checkOperation(OperationCategory.READ);

      if (isPermissionEnabled) {
        // ... (permission check and the directory lookup elided; the
        // read lock is released in a matching finally block)
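On the client side, Hdfs.listStatus (the AbstractFileSystem implementation
behind FileContext) shows the complete consumer loop: a fast path when the
first batch holds everything, otherwise batched accumulation: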

  public FileStatus[] listStatus(Path f)
      throws IOException, UnresolvedLinkException {
    String src = getUriPath(f);

    // fetch the first batch of entries in the directory
    DirectoryListing thisListing = dfs.listPaths(
        src, HdfsFileStatus.EMPTY_NAME);

    if (thisListing == null) { // the directory does not exist
      throw new FileNotFoundException("File " + f + " does not exist.");
    }
   
    HdfsFileStatus[] partialListing = thisListing.getPartialListing();
    if (!thisListing.hasMore()) { // got all entries of the directory
      FileStatus[] stats = new FileStatus[partialListing.length];
      for (int i = 0; i < partialListing.length; i++) {
        stats[i] = makeQualified(partialListing[i], f);
      }
      return stats;
    }

    // The listing does not fit in a single response; fetch the rest in
    // batches, pre-sizing the result with the estimated total entry count.
    int totalNumEntries =
      partialListing.length + thisListing.getRemainingEntries();
    ArrayList<FileStatus> listing =
      new ArrayList<FileStatus>(totalNumEntries);
    // add the first batch of entries to the array list
    for (HdfsFileStatus fileStatus : partialListing) {
      listing.add(makeQualified(fileStatus, f));
    }
    // now fetch more entries
    do {
      thisListing = dfs.listPaths(src, thisListing.getLastName());
      if (thisListing == null) {
        // the directory is deleted
        throw new FileNotFoundException("File " + f + " does not exist.");
      }
      partialListing = thisListing.getPartialListing();
      for (HdfsFileStatus fileStatus : partialListing) {
        listing.add(makeQualified(fileStatus, f));
      }
    } while (thisListing.hasMore());
    return listing.toArray(new FileStatus[listing.size()]);
  }
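The protobuf server-side translator bridges GetListingRequestProto /
GetListingResponseProto and the Java types; a prebuilt empty response
(VOID_GETLISTING_RESPONSE) answers the null case: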

  // Prebuilt response for a null listing; the declaration is reconstructed
  // here from the truncated excerpt.
  private static final GetListingResponseProto VOID_GETLISTING_RESPONSE =
      GetListingResponseProto.newBuilder().build();

  @Override
  public GetListingResponseProto getListing(RpcController controller,
      GetListingRequestProto req) throws ServiceException {
    try {
      DirectoryListing result = server.getListing(
          req.getSrc(), req.getStartAfter().toByteArray(),
          req.getNeedLocation());
      if (result != null) {
        return GetListingResponseProto.newBuilder().setDirList(
            PBHelper.convert(result)).build();
      } else {
        return VOID_GETLISTING_RESPONSE;  // empty listing response
      }
    } catch (IOException e) {
      throw new ServiceException(e);
    }
  }
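DistributedFileSystem.listStatus mirrors the loop above, additionally
counting each fetch against the FileSystem read statistics: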

  @Override
  public FileStatus[] listStatus(Path p) throws IOException {
    String src = getPathName(p);

    // fetch the first batch of entries in the directory
    DirectoryListing thisListing = dfs.listPaths(
        src, HdfsFileStatus.EMPTY_NAME);

    if (thisListing == null) { // the directory does not exist
      throw new FileNotFoundException("File " + p + " does not exist.");
    }
   
    HdfsFileStatus[] partialListing = thisListing.getPartialListing();
    if (!thisListing.hasMore()) { // got all entries of the directory
      FileStatus[] stats = new FileStatus[partialListing.length];
      for (int i = 0; i < partialListing.length; i++) {
        stats[i] = makeQualified(partialListing[i], p);
      }
      statistics.incrementReadOps(1);
      return stats;
    }

    // The listing does not fit in a single response; fetch the rest in
    // batches, pre-sizing the result with the estimated total entry count.
    int totalNumEntries =
      partialListing.length + thisListing.getRemainingEntries();
    ArrayList<FileStatus> listing =
      new ArrayList<FileStatus>(totalNumEntries);
    // add the first batch of entries to the array list
    for (HdfsFileStatus fileStatus : partialListing) {
      listing.add(makeQualified(fileStatus, p));
    }
    statistics.incrementLargeReadOps(1);
    // now fetch more entries
    do {
      thisListing = dfs.listPaths(src, thisListing.getLastName());
      if (thisListing == null) { // the directory is deleted
        throw new FileNotFoundException("File " + p + " does not exist.");
      }
      partialListing = thisListing.getPartialListing();
      for (HdfsFileStatus fileStatus : partialListing) {
        listing.add(makeQualified(fileStatus, p));
      }
      statistics.incrementLargeReadOps(1);
    } while (thisListing.hasMore());
    return listing.toArray(new FileStatus[listing.size()]);
  }
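On the DataNode web UI, the directory-browsing helper (DatanodeJspHelper)
pages through a listing to render an HTML table, pulling the next batch at
the bottom of its loop: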

            + "&namenodeInfoPort=" + namenodeInfoPort
            + JspHelper.getDelegationTokenUrlParam(tokenString)
            + JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, nnAddr)
            + "\">Go to parent directory</a><br>");

      DirectoryListing thisListing =
        dfs.listPaths(target, HdfsFileStatus.EMPTY_NAME);
      if (thisListing == null || thisListing.getPartialListing().length == 0) {
        out.print("Empty directory");
      } else {
        JspHelper.addTableHeader(out);
        int row = 0;
        JspHelper.addTableRow(out, headings, row++);
        String[] cols = new String[headings.length];
        do {
          HdfsFileStatus[] files = thisListing.getPartialListing();
          for (int i = 0; i < files.length; i++) {
            String localFileName = files[i].getLocalName();
            // Fill in the type, size, replication and block-size columns
            if (!files[i].isDir()) {
              cols[1] = "file";
              cols[2] = StringUtils.byteDesc(files[i].getLen());
              cols[3] = Short.toString(files[i].getReplication());
              cols[4] = StringUtils.byteDesc(files[i].getBlockSize());
            } else {
              cols[1] = "dir";
              cols[2] = "";
              cols[3] = "";
              cols[4] = "";
            }
            String datanodeUrl = req.getRequestURL() + "?dir="
              + URLEncoder.encode(files[i].getFullName(target), "UTF-8")
              + "&namenodeInfoPort=" + namenodeInfoPort
              + JspHelper.getDelegationTokenUrlParam(tokenString)
              + JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, nnAddr);
            cols[0] = "<a href=\"" + datanodeUrl + "\">"
              + localFileName + "</a>";
            cols[5] = lsDateFormat.format(new Date((files[i]
              .getModificationTime())));
            cols[6] = files[i].getPermission().toString();
            cols[7] = files[i].getOwner();
            cols[8] = files[i].getGroup();
            JspHelper.addTableRow(out, cols, row++);
          }
          if (!thisListing.hasMore()) {
            break;
          }
          thisListing = dfs.listPaths(target, thisListing.getLastName());
        } while (thisListing != null);
        JspHelper.addTableFooter(out);
      }
    }
    out.print("<br><a href=\"http://"
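Finally, PBHelper converts the protobuf wire form back into a
DirectoryListing: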

 
  public static DirectoryListing convert(DirectoryListingProto dl) {
    if (dl == null)
      return null;
    List<HdfsFileStatusProto> partList = dl.getPartialListingList();
    return new DirectoryListing(
        partList.isEmpty() ? new HdfsLocatedFileStatus[0]
          : PBHelper.convert(
              partList.toArray(new HdfsFileStatusProto[partList.size()])),
        dl.getRemainingEntries());
  }
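
A matching convert overload in PBHelper performs the reverse translation
when a listing is serialized onto the wire.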
