Class org.apache.cassandra.io.util.DataIntegrityMetadata

Examples of org.apache.cassandra.io.util.DataIntegrityMetadata.ChecksumValidator
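The excerpts below are all taken from the write(WritableByteChannel) method of Cassandra's SSTable stream writer. When a CRC component exists alongside the data file, a ChecksumValidator is opened from the sstable descriptor and the transfer buffer is sized to the validator's chunk size, so each chunk read from disk can be checked against its stored checksum before being compressed onto the wire with LZF. Without a CRC file, a plain DEFAULT_CHUNK_SIZE buffer is used and no validation takes place.

To make the one-checksum-per-chunk model concrete, here is a minimal, self-contained sketch using only JDK classes. It is not the Cassandra API: the side-file layout (one 4-byte checksum per chunk of the data file) and the choice of CRC32 (some Cassandra versions used Adler32) are assumptions made for the demo.

    import java.io.DataInputStream;
    import java.io.FileInputStream;
    import java.io.IOException;
    import java.util.zip.CRC32;

    public final class ChunkedCrcCheck
    {
        /** Validates dataPath against a side file holding one 4-byte checksum per chunk. */
        public static void validate(String dataPath, String crcPath, int chunkSize) throws IOException
        {
            try (FileInputStream data = new FileInputStream(dataPath);
                 DataInputStream crcs = new DataInputStream(new FileInputStream(crcPath)))
            {
                byte[] chunk = new byte[chunkSize];
                CRC32 crc = new CRC32();
                int read;
                while ((read = data.read(chunk)) > 0)
                {
                    crc.reset();
                    crc.update(chunk, 0, read);
                    int expected = crcs.readInt();        // stored checksum for this chunk
                    if ((int) crc.getValue() != expected) // compare low 32 bits
                        throw new IOException("Checksum mismatch in " + dataPath);
                }
            }
        }
    }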


    /** Streams the required sections of the SSTable data file to the given channel. */
    public void write(WritableByteChannel channel) throws IOException
    {
        long totalSize = totalSize();
        RandomAccessReader file = sstable.openDataReader();
        ChecksumValidator validator = null;
        if (new File(sstable.descriptor.filenameFor(Component.CRC)).exists())
            validator = DataIntegrityMetadata.checksumValidator(sstable.descriptor);

        transferBuffer = validator == null ? new byte[DEFAULT_CHUNK_SIZE] : new byte[validator.chunkSize];

        // setting up data compression stream
        compressedOutput = new LZFOutputStream(Channels.newOutputStream(channel));
        long progress = 0L;

        try
        {
            // stream each of the required sections of the file
            for (Pair<Long, Long> section : sections)
            {
                long start = validator == null ? section.left : validator.chunkStart(section.left);
                int skipBytes = (int) (section.left - start);
                // seek to the beginning of the section
                file.seek(start);
                if (validator != null)
                    validator.seek(start);

                // length of the section to read
                long length = section.right - start;
                // tracks write progress
                long bytesTransferred = 0;
                // ... remainder of the method elided in this excerpt ...
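Checksums cover whole chunks, so a section that starts mid-chunk cannot be validated from its own first byte: the writer backs up to the enclosing chunk boundary (validator.chunkStart(section.left)), seeks both the data file and the validator there, and records in skipBytes how many leading bytes to discard once the chunk has been validated. A standalone sketch of that arithmetic, with an assumed 64 KiB chunk size (the real value comes from validator.chunkSize):

    public final class ChunkAlignmentDemo
    {
        // Assumed example value; in the excerpt the chunk size comes from the validator.
        private static final int CHUNK_SIZE = 65536;

        /** Aligns an arbitrary file offset down to the start of its chunk. */
        static long chunkStart(long offset)
        {
            return (offset / CHUNK_SIZE) * (long) CHUNK_SIZE;
        }

        public static void main(String[] args)
        {
            long sectionLeft = 70_000L;                  // section begins mid-chunk
            long start = chunkStart(sectionLeft);        // 65536: where both seeks land
            int skipBytes = (int) (sectionLeft - start); // 4464: leading bytes to discard
            System.out.printf("start=%d skipBytes=%d%n", start, skipBytes);
        }
    }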


    /** Streams the required sections of the SSTable data file to the given channel. */
    public void write(WritableByteChannel channel) throws IOException
    {
        long totalSize = totalSize();
        RandomAccessReader file = sstable.openDataReader();
        ChecksumValidator validator = new File(sstable.descriptor.filenameFor(Component.CRC)).exists()
                                    ? DataIntegrityMetadata.checksumValidator(sstable.descriptor)
                                    : null;
        transferBuffer = validator == null ? new byte[DEFAULT_CHUNK_SIZE] : new byte[validator.chunkSize];

        // setting up data compression stream
        compressedOutput = new LZFOutputStream(Channels.newOutputStream(channel));
        long progress = 0L;

        try
        {
            // stream each of the required sections of the file
            for (Pair<Long, Long> section : sections)
            {
                long start = validator == null ? section.left : validator.chunkStart(section.left);
                int readOffset = (int) (section.left - start);
                // seek to the beginning of the section
                file.seek(start);
                if (validator != null)
                    validator.seek(start);

                // length of the section to read
                long length = section.right - start;
                // tracks write progress
                long bytesRead = 0;
                // ... remainder of the method elided in this excerpt ...
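This variant differs from the previous excerpt only cosmetically: the validator is initialized with a conditional expression rather than an if statement, and skipBytes/bytesTransferred are renamed readOffset/bytesRead. The chunk alignment and the paired file.seek(start)/validator.seek(start) calls are identical, so the two versions behave the same.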

    /** Streams the required sections of the SSTable data file to the given channel. */
    public void write(WritableByteChannel channel) throws IOException
    {
        long totalSize = totalSize();
        RandomAccessReader file = sstable.openDataReader();
        ChecksumValidator validator = new File(sstable.descriptor.filenameFor(Component.CRC)).exists()
                                    ? DataIntegrityMetadata.checksumValidator(sstable.descriptor)
                                    : null;
        transferBuffer = validator == null ? new byte[DEFAULT_CHUNK_SIZE] : new byte[validator.chunkSize];

        // setting up data compression stream
        compressedOutput = new LZFOutputStream(Channels.newOutputStream(channel));
        long progress = 0L;

        try
        {
            // stream each of the required sections of the file
            for (Pair<Long, Long> section : sections)
            {
                long start = validator == null ? section.left : validator.chunkStart(section.left);
                int skipBytes = (int) (section.left - start);
                // seek to the beginning of the section
                file.seek(start);
                if (validator != null)
                    validator.seek(start);

                // length of the section to read
                long length = section.right - start;
                // tracks write progress
                long bytesTransferred = 0;
                // ... remainder of the method elided in this excerpt ...
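Every excerpt stops inside the try block. The elided remainder presumably iterates over each section, writing (and, when a validator is present, validating) one chunk at a time through the LZF stream, and then releases the reader and the validator. Below is a self-contained sketch of the cleanup side of that pattern; closeQuietly is a local helper written for the demo, not Cassandra's FileUtils.closeQuietly:

    import java.io.Closeable;
    import java.io.IOException;

    final class CleanupPattern
    {
        /** Best-effort close; swallows the IOException a close during cleanup may throw. */
        static void closeQuietly(Closeable c)
        {
            if (c == null)
                return;
            try
            {
                c.close();
            }
            catch (IOException ignored)
            {
                // nothing sensible to do while cleaning up
            }
        }

        static void stream(Closeable file, Closeable validator) throws IOException
        {
            try
            {
                // ... stream each required section, as in the excerpts above ...
            }
            finally
            {
                closeQuietly(file);      // always release the data file reader
                closeQuietly(validator); // validator may be null when no CRC file exists
            }
        }
    }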
