Package org.apache.hadoop.io.file.tfile (class TFile)

Examples of org.apache.hadoop.io.file.tfile.TFile.Reader
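
Most of the snippets below read from a TFile that was previously written to path on the file system fs with the configuration conf; those fields belong to the enclosing test classes and are not shown here. For orientation, here is a minimal sketch of producing such a file with TFile.Writer (the block size, compression setting, comparator, and sample records are illustrative assumptions, not taken from the examples):

  // requires org.apache.hadoop.fs.FSDataOutputStream and
  // org.apache.hadoop.io.file.tfile.TFile
  FSDataOutputStream out = fs.create(path);
  TFile.Writer writer =
      new TFile.Writer(out, 64 * 1024, TFile.COMPRESSION_NONE, "memcmp", conf);
  // with the "memcmp" comparator, keys must be appended in ascending byte order
  writer.append("key0".getBytes(), "value0".getBytes());
  writer.append("key1".getBytes(), "value1".getBytes());
  writer.close();
  out.close();

The Reader constructor used throughout the examples then takes the reopened stream, the file length obtained from FileStatus, and the same Configuration.

The first example, readValueBeforeKey, seeks to a record with Reader.createScannerByKey and shows that Scanner.Entry lets the value be read before the key; both are checked against the expected strings: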


  private void readValueBeforeKey(int recordIndex)
      throws IOException {
    Reader reader =
        new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
    Scanner scanner =
        reader.createScannerByKey(composeSortedKey(KEY, recordIndex)
            .getBytes(), null);

    try {
      // read the value first ...
      byte[] vbuf = new byte[BUF_SIZE];
      int vlen = scanner.entry().getValueLength();
      scanner.entry().getValue(vbuf);
      Assert.assertEquals(new String(vbuf, 0, vlen), VALUE + recordIndex);

      // ... then read the key of the same entry
      byte[] kbuf = new byte[BUF_SIZE];
      int klen = scanner.entry().getKeyLength();
      scanner.entry().getKey(kbuf);
      Assert.assertEquals(new String(kbuf, 0, klen), composeSortedKey(KEY,
          recordIndex));
    } finally {
      scanner.close();
      reader.close();
    }
  }
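
readKeyWithoutValue reads only the key at the seeked position and, after advancing, the key of the following record, without ever touching the values: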

  private void readKeyWithoutValue(int recordIndex)
      throws IOException {
    Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
    Scanner scanner =
        reader.createScannerByKey(composeSortedKey(KEY, recordIndex)
            .getBytes(), null);

    try {
      // read the indexed key
      byte[] kbuf1 = new byte[BUF_SIZE];
      int klen1 = scanner.entry().getKeyLength();
      scanner.entry().getKey(kbuf1);
      Assert.assertEquals(new String(kbuf1, 0, klen1), composeSortedKey(KEY,
          recordIndex));

      if (scanner.advance() && !scanner.atEnd()) {
        // read the next key following the indexed
        byte[] kbuf2 = new byte[BUF_SIZE];
        int klen2 = scanner.entry().getKeyLength();
        scanner.entry().getKey(kbuf2);
        Assert.assertEquals(new String(kbuf2, 0, klen2), composeSortedKey(KEY,
            recordIndex + 1));
      }
    } finally {
      scanner.close();
      reader.close();
    }
  }
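
readValueWithoutKey does the opposite: it reads the value at the seeked position and, after advancing, the next value, without reading any keys. Unlike the previous examples, it does not guard the reads with try/finally before closing the scanner and reader: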

  private void readValueWithoutKey(int recordIndex)
      throws IOException {
    Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);

    Scanner scanner =
        reader.createScannerByKey(composeSortedKey(KEY, recordIndex)
            .getBytes(), null);

    byte[] vbuf1 = new byte[BUF_SIZE];
    int vlen1 = scanner.entry().getValueLength();
    scanner.entry().getValue(vbuf1);
    Assert.assertEquals(new String(vbuf1, 0, vlen1), VALUE + recordIndex);

    if (scanner.advance() && !scanner.atEnd()) {
      byte[] vbuf2 = new byte[BUF_SIZE];
      int vlen2 = scanner.entry().getValueLength();
      scanner.entry().getValue(vbuf2);
      Assert.assertEquals(new String(vbuf2, 0, vlen2), VALUE
          + (recordIndex + 1));
    }

    scanner.close();
    reader.close();
  }
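
readKeyManyTimes checks that the key of the current entry can be fetched repeatedly and yields the same bytes each time: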

  private void readKeyManyTimes(int recordIndex) throws IOException {
    Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);

    Scanner scanner =
        reader.createScannerByKey(composeSortedKey(KEY, recordIndex)
            .getBytes(), null);

    // read the indexed key
    byte[] kbuf1 = new byte[BUF_SIZE];
    int klen1 = scanner.entry().getKeyLength();
    scanner.entry().getKey(kbuf1);
    Assert.assertEquals(new String(kbuf1, 0, klen1), composeSortedKey(KEY,
        recordIndex));

    // read the same key a second time; the result must be unchanged
    klen1 = scanner.entry().getKeyLength();
    scanner.entry().getKey(kbuf1);
    Assert.assertEquals(new String(kbuf1, 0, klen1), composeSortedKey(KEY,
        recordIndex));

    // and a third time
    klen1 = scanner.entry().getKeyLength();
    scanner.entry().getKey(kbuf1);
    Assert.assertEquals(new String(kbuf1, 0, klen1), composeSortedKey(KEY,
        recordIndex));

    scanner.close();
    reader.close();
  }
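
The next fragment appears to come from a MapReduce record reader: it opens the file backing an input split and creates a scanner restricted to the split's byte range with createScannerByByteRange (fileIn, reader, scanner, start, split, and job are fields of the enclosing class and are not shown):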

        final Path file = split.getPath();

        // open the file and seek to the start of the split
        FileSystem fs = file.getFileSystem(job);
        fileIn = fs.open(split.getPath());
        reader = new Reader(fileIn, fs.getFileStatus(file).getLen(), job);
        scanner = reader.createScannerByByteRange(start, split.getLength());
    }
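
readFile scans the whole file as ten byte-range splits, counts the entries in each split, and asserts that the total matches Reader.getEntryCount():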

  void readFile() throws IOException {
    long fileLength = fs.getFileStatus(path).getLen();
    int numSplit = 10;
    long splitSize = fileLength / numSplit + 1;

    Reader reader =
        new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
    long offset = 0;
    long rowCount = 0;
    BytesWritable key, value;
    for (int i = 0; i < numSplit; ++i, offset += splitSize) {
      Scanner scanner = reader.createScanner(offset, splitSize);
      int count = 0;
      key = new BytesWritable();
      value = new BytesWritable();
      while (!scanner.atEnd()) {
        scanner.entry().get(key, value);
        ++count;
        scanner.advance();
      }
      scanner.close();
      Assert.assertTrue(count > 0);
      rowCount += count;
    }
    Assert.assertEquals(rowCount, reader.getEntryCount());
    reader.close();
  }
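
testNoDataEntry opens a TFile whose writer was closed without appending any records and verifies that the file is still reported as sorted and that a fresh scanner starts out at its end: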

  public void testNoDataEntry() throws IOException {
    if (skip)
      return;
    closeOutput();

    Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
    Assert.assertTrue(reader.isSorted());
    Scanner scanner = reader.createScanner();
    Assert.assertTrue(scanner.atEnd());
    scanner.close();
    reader.close();
  }
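
testLocate positions a scanner on several keys (a key inside the first block, the last key of the first block, and the first key of the second block) through a locate helper defined elsewhere in the test, and asserts that locating a key that does not exist, "keyX", returns the scanner's end location: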
  public void testLocate() throws IOException {
    if (skip)
      return;
    writeRecords(3 * records1stBlock);
    Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
    Scanner scanner = reader.createScanner();
    Location loc2 =
        locate(scanner, composeSortedKey(KEY, 3 * records1stBlock, 2)
            .getBytes());
    Location locLastIn1stBlock =
        locate(scanner, composeSortedKey(KEY, 3 * records1stBlock,
            records1stBlock - 1).getBytes());
    Location locFirstIn2ndBlock =
        locate(scanner, composeSortedKey(KEY, 3 * records1stBlock,
            records1stBlock).getBytes());
    Location locX = locate(scanner, "keyX".getBytes());
    Assert.assertEquals(scanner.endLocation, locX);
    scanner.close();
    reader.close();
  }
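
testFailureWriterNotClosed verifies that constructing a Reader while the TFile writer is still open fails with an IOException: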

  public void testFailureWriterNotClosed() throws IOException {
    if (skip)
      return;
    Reader reader = null;
    try {
      reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
      Assert.fail("Cannot read before closing the writer.");
    }
    catch (IOException e) {
      // noop, expecting exceptions
    }
    finally {
      if (reader != null) {
        reader.close();
      }
    }
  }
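
The last snippet exercises meta blocks. The stream outMeta was presumably obtained from the writer's prepareMetaBlock call, which is not part of the snippet; after writing and closing it, Reader.getMetaBlock returns a stream for the existing block "testX" and throws for the non-existent "testY":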

    outMeta.write(123);
    outMeta.write("foo".getBytes());
    outMeta.close();
    closeOutput();

    Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
    DataInputStream mb = reader.getMetaBlock("testX");
    Assert.assertNotNull(mb);
    mb.close();
    try {
      DataInputStream mbBad = reader.getMetaBlock("testY");
      Assert.assertNull(mbBad);
      Assert.fail("Error on handling non-existent metablocks.");
    }
    catch (Exception e) {
      // noop, expecting exceptions
    }
    reader.close();
  }