Examples of PositionTrackingInputStream


Examples of com.starlight.io.PositionTrackingInputStream

      //noinspection unchecked
      listeners.dispatch().indexingStarting( attachment, starting_line == 0 );

      InputStream root_stream = null;
      PositionTrackingInputStream in = null;
      try {
        root_stream = openStreamForFile( file );
        in = new PositionTrackingInputStream(
          new BufferedInputStream( root_stream ) );

       
        int line = 0;
       
        // If we're skipping some data, do it now
        if ( starting_position > 0 ) {
          long actual = in.skip( starting_position );
         
          // If we weren't able to skip the desired number of bytes, we'll need
          // to do a full index because we don't know what line we're at.
          if ( actual != starting_position ) {
            // If mark is supported, we can just reset to the beginning and
            // go on
            if ( in.markSupported() ) {
              in.reset(); // pop to beginning (no mark set)

              // Indicate that we're starting a full index
              //noinspection unchecked
              listeners.dispatch().indexingStarting( attachment, true );
            }
            // If it's not supported, we'll need to close the files and run
            // a new instance.
            else {
              // Close out the files
//              IOKit.close( line_reader );
              IOKit.close( in );
             
              // Run new instance
              Indexer sub = new Indexer( 0, 0 );
              sub.run();
              return;
            }
          }
          else line = starting_line;
        }
       
        // At this point, ready to do the work, so grab a lock...
        row_index_map_lock.lock();
        try {
          // If this is a full index, clear the existing map
          if ( line == 0 ) {
            row_index_map.clear();
            row_skip_mod = 1;
          }

          // This is not very efficient, but buffering messes up the position
          int bite;
          while( ( bite = in.read() ) != -1 ) {
            // If it's not a newline, ignore.
            // WARNING: this doesn't handle different line endings well
            if ( bite != '\n' ) continue;


            // NOTE: increment line right away since we're now at the end
            //       of the preceding line.
            line++;

            if ( line % row_skip_mod == 0 ) {
              row_index_map.put( line, in.position() );
//              printRowIndexMap( "after add" );
             
              if ( row_index_map.size() > max_index_size ) {
                row_skip_mod = increaseRowSkipMod( row_skip_mod, line );
                printRowIndexMap( "after grow" );
              }
            }
          }
        }
        finally {
          row_index_map_lock.unlock();
        }
       
        num_lines = line;
        last_index_size = in.position();

//        System.out.println( "  Found " + num_lines + " lines");
//        System.out.println( "  Map size: " + row_index_map.size() );
//        System.out.println( "Map: " );
//        for( int i = 0; i <= num_lines; i++ ) {
View Full Code Here

Examples of com.starlight.io.PositionTrackingInputStream

      throw new FileNotFoundException( "File \"" + file + "\" does not exist." );
    }

    FileInputStream fin = null;
    BufferedInputStream bin = null;
    PositionTrackingInputStream pin = null;
    DataInputStream din = null;
    try {
      fin = new FileInputStream( file );
      bin = new BufferedInputStream( fin );
      pin = new PositionTrackingInputStream( bin );
      din = new DataInputStream( pin );

      parse_internal( din, pin );
    }
    finally {
View Full Code Here

Examples of org.apache.hadoop.hdfs.server.namenode.FSEditLogLoader.PositionTrackingInputStream

  /**
   * Process image file.
   */
  public void go() throws IOException  {
    DataInputStream in = null;
    PositionTrackingInputStream tracker = null;
    ImageLoader fsip = null;
    boolean done = false;
    try {
      tracker = new PositionTrackingInputStream(new BufferedInputStream(
               new FileInputStream(new File(inputFile))));
      in = new DataInputStream(tracker);

      int imageVersionFile = findImageVersion(in);

      fsip = ImageLoader.LoaderFactory.getLoader(imageVersionFile);

      if(fsip == null)
        throw new IOException("No image processor to read version " +
            imageVersionFile + " is available.");
      fsip.loadImage(in, processor, skipBlocks);
      done = true;
    } finally {
      if (!done) {
        LOG.error("image loading failed at offset " + tracker.getPos());
      }
      IOUtils.cleanup(LOG, in, tracker);
    }
  }
View Full Code Here

Examples of org.apache.hadoop.hdfs.server.namenode.FSEditLogLoader.PositionTrackingInputStream

  /**
   * Process image file.
   */
  public void go() throws IOException  {
    DataInputStream in = null;
    PositionTrackingInputStream tracker = null;
    ImageLoader fsip = null;
    boolean done = false;
    try {
      tracker = new PositionTrackingInputStream(new BufferedInputStream(
               new FileInputStream(new File(inputFile))));
      in = new DataInputStream(tracker);

      int imageVersionFile = findImageVersion(in);

      fsip = ImageLoader.LoaderFactory.getLoader(imageVersionFile);

      if(fsip == null)
        throw new IOException("No image processor to read version " +
            imageVersionFile + " is available.");
      fsip.loadImage(in, processor, skipBlocks);
      done = true;
    } finally {
      if (!done) {
        LOG.error("image loading failed at offset " + tracker.getPos());
      }
      IOUtils.cleanup(LOG, in, tracker);
    }
  }
View Full Code Here

Examples of org.apache.hadoop.hdfs.server.namenode.FSEditLogLoader.PositionTrackingInputStream

    }

    journalInputStream = new BookKeeperJournalInputStream(ledger,
        firstBookKeeperEntry);
    bin = new BufferedInputStream(journalInputStream);
    tracker = new PositionTrackingInputStream(bin, 0);
    DataInputStream in = new DataInputStream(tracker);
    try {
      logVersion = readLogVersion(in);
    } catch (EOFException e) {
      throw new LedgerHeaderCorruptException("No header file in the ledger");
View Full Code Here

Examples of org.apache.hadoop.hdfs.server.namenode.FSEditLogLoader.PositionTrackingInputStream

    // Try to set the underlying stream to the specified position
    journalInputStream.position(position);
    // Reload the position tracker and log reader to adjust to the newly
    // refreshed position
    bin = new BufferedInputStream(journalInputStream);
    tracker = new PositionTrackingInputStream(bin, position);
    DataInputStream in = new DataInputStream(tracker);
    if (position == 0) { // If we are at the beginning, re-read the version
      logVersion = readLogVersion(in);
    }
    reader = new Reader(in, logVersion);
View Full Code Here

Examples of org.apache.hadoop.hdfs.server.namenode.FSEditLogLoader.PositionTrackingInputStream

    DataInputStream in = null;
    DataOutputStream out = null;

    try {
      // setup in
      PositionTrackingInputStream ptis = new PositionTrackingInputStream(
          new FileInputStream(new File(inputFile)));
      in = new DataInputStream(ptis);

      // read header information
      int imgVersion = in.readInt();
      if (!LayoutVersion.supports(Feature.FSIMAGE_COMPRESSION, imgVersion)) {
        System.out
            .println("Image is not compressed. No output will be produced.");
        return;
      }
      int namespaceId = in.readInt();
      long numFiles = in.readLong();
      long genstamp = in.readLong();

      long imgTxId = -1;
      if (LayoutVersion.supports(Feature.STORED_TXIDS, imgVersion)) {
        imgTxId = in.readLong();
      }
      FSImageCompression compression = FSImageCompression
          .readCompressionHeader(new Configuration(), in);
      if (compression.isNoOpCompression()) {
        System.out
            .println("Image is not compressed. No output will be produced.");
        return;
      }
      in = BufferedByteInputStream.wrapInputStream(
          compression.unwrapInputStream(in), FSImage.LOAD_SAVE_BUFFER_SIZE,
          FSImage.LOAD_SAVE_CHUNK_SIZE);
      System.out.println("Starting decompression.");

      // setup output
      out = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(
          outputFile)));

      // write back the uncompressed information
      out.writeInt(imgVersion);
      out.writeInt(namespaceId);
      out.writeLong(numFiles);
      out.writeLong(genstamp);
      if (LayoutVersion.supports(Feature.STORED_TXIDS, imgVersion)) {
        out.writeLong(imgTxId);
      }
      // no compression
      out.writeBoolean(false);

      // copy the data
      long size = new File(inputFile).length();
      // read in 1MB chunks
      byte[] block = new byte[1024 * 1024];
      while (true) {
        int bytesRead = in.read(block);
        if (bytesRead <= 0)
          break;
        out.write(block, 0, bytesRead);
        printProgress(ptis.getPos(), size);
      }

      out.close();

      long stop = System.currentTimeMillis();
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.