Package org.apache.phoenix.schema.tuple

Examples of org.apache.phoenix.schema.tuple.ResultTuple
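ResultTuple is the Tuple implementation backed by an HBase Result; the snippets on this page either wrap a freshly scanned Result directly or first rebuild a Result from serialized bytes held in an ImmutableBytesWritable. As a minimal, hypothetical sketch of the direct pattern (the row key, column family, qualifier, and value below are made-up illustration values, not from any of the excerpts):

    import java.util.Collections;
    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.util.Bytes;
    import org.apache.phoenix.schema.tuple.ResultTuple;
    import org.apache.phoenix.schema.tuple.Tuple;

    // Build a single-cell Result and expose it through the Tuple interface.
    static Tuple singleCellTuple() {
        KeyValue kv = new KeyValue(Bytes.toBytes("r1"), Bytes.toBytes("0"),
                Bytes.toBytes("NAME"), Bytes.toBytes("foo"));
        return new ResultTuple(new Result(Collections.singletonList(kv)));
    }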


                // Build Map with evaluated hash key as key and row as value
                for (int i = 0; i < nRows; i++) {
                    int resultSize = (int)Bytes.readVLong(hashCacheByteArray, offset);
                    offset += WritableUtils.decodeVIntSize(hashCacheByteArray[offset]);
                    ImmutableBytesWritable value = new ImmutableBytesWritable(hashCacheByteArray, offset, resultSize);
                    Tuple result = new ResultTuple(new Result(value));
                    ImmutableBytesPtr key = TupleUtil.getConcatenatedValue(result, onExpressions);
                    List<Tuple> tuples = hashCacheMap.get(key);
                    if (tuples == null) {
                        tuples = new LinkedList<Tuple>();
                        hashCacheMap.put(key, tuples);
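This excerpt comes from building the server-side cache for a hash join: each serialized row is vint-length-prefixed, rebuilt as a Result, wrapped in a ResultTuple, and grouped under its evaluated join-key bytes. The listing is cut off mid-loop; presumably it continues by appending the tuple and advancing past the row's bytes, roughly like this (a guess at the shape, not the original code):

                    }
                    tuples.add(result);   // group rows that share the same join key
                    offset += resultSize; // skip past this row's serialized bytes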


   
    private void processResults(List<KeyValue> result, boolean hasBatchLimit) throws IOException {
        if (result.isEmpty())
            return;
       
        Tuple tuple = new ResultTuple(new Result(result));
        if (joinInfo == null || joinInfo.forceProjection()) {
            tuple = projector.projectResults(tuple);
        }
        if (joinInfo == null) {
            resultQueue.offer(tuple);
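Here the ResultTuple sits on the server side, in a coprocessor scan: the List<KeyValue> returned for one row is wrapped in a Result and then a ResultTuple, optionally run through the tuple projector, and offered to a result queue; the rest of the method (not shown) handles the join cases.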

    public Tuple next() throws SQLException {
        try {
            Result result = scanner.next();
            // TODO: use ResultTuple.setResult(result)
            // Need to create a new one if holding on to it (i.e. OrderedResultIterator)
            return result == null ? null : new ResultTuple(result);
        } catch (IOException e) {
            throw ServerUtil.parseServerException(e);
        }
    }
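The iterator above is the typical client-side producer of ResultTuples. Consuming it follows the usual loop-until-null convention; a minimal sketch, assuming the interface the next() method above implements is Phoenix's ResultIterator, and with drain as a purely illustrative name:

    // Hypothetical consumer loop for a tuple-producing iterator.
    static void drain(ResultIterator iterator) throws SQLException {
        Tuple tuple;
        while ((tuple = iterator.next()) != null) {
            // Each tuple wraps one HBase Result; it can now be fed to expression
            // evaluation, e.g. TupleUtil.getConcatenatedValue(tuple, expressions).
        }
    }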

            }
           
            // Copy the serialized Result bytes out of the read buffer,
            // rebuild the Result, and wrap it in a ResultTuple.
            byte[] rb = new byte[length];
            readBuffer.get(rb);
            Result result = new Result(new ImmutableBytesWritable(rb));
            ResultTuple rt = new ResultTuple(result);
            int sortKeySize = readBuffer.getInt();
            ImmutableBytesWritable[] sortKeys = new ImmutableBytesWritable[sortKeySize];
            for (int i = 0; i < sortKeySize; i++) {
                int contentLength = readBuffer.getInt();
                if (contentLength > 0) {
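This reader rehydrates a previously serialized row: the Result bytes are copied out of the ByteBuffer, wrapped via ImmutableBytesWritable into a Result and then a ResultTuple, after which the sort keys stored alongside the row are read back, which suggests it belongs to an ordered iterator restoring sorted results.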

            }
            // Read the vint-prefixed Result bytes and wrap them in a ResultTuple.
            int resultSize = ByteUtil.vintFromBytes(bytes, offset);
            offset += WritableUtils.getVIntSize(resultSize);
            ImmutableBytesWritable value = new ImmutableBytesWritable(bytes, offset, resultSize);
            offset += resultSize;
            Tuple result = new ResultTuple(new Result(value));
            return next = result;
        }
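Both this reader and the hash-cache loop above decode the same framing: each serialized Result is preceded by a Hadoop variable-length integer holding its size. A small self-contained sketch of that framing using WritableUtils directly (the frame/unframe names are just for illustration):

    import java.io.*;
    import org.apache.hadoop.io.WritableUtils;

    // Prefix a payload with its vint-encoded length.
    static byte[] frame(byte[] payload) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bos);
        WritableUtils.writeVInt(out, payload.length);
        out.write(payload);
        out.flush();
        return bos.toByteArray();
    }

    // Read the vint length, then the payload; mirrors vintFromBytes/getVIntSize above.
    static byte[] unframe(byte[] framed) throws IOException {
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(framed));
        byte[] payload = new byte[WritableUtils.readVInt(in)];
        in.readFully(payload);
        return payload;
    }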

                    return next;
                }
                offset += bytesRead;
                totalBytesRead += bytesRead;
            }
            // Wrap the fully read buffer contents as the next tuple.
            next = new ResultTuple(new Result(new ImmutableBytesWritable(buffer, 0, length)));
            return next;
        }
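In this variant the serialized tuple may arrive across several reads; once length bytes have accumulated in buffer, the whole buffer is wrapped in one step as new ResultTuple(new Result(new ImmutableBytesWritable(buffer, 0, length))).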

            if (hasLimit)
                throw new UnsupportedOperationException("Cannot support join operations in scans with limit");
           
            int count = joinInfo.getJoinIds().length;
            List<Tuple>[] tuples = new List[count];
            Tuple tuple = new ResultTuple(new Result(result));
            boolean cont = true;
            for (int i = 0; i < count; i++) {
                ImmutableBytesPtr key = TupleUtil.getConcatenatedValue(tuple, joinInfo.getJoinExpressions()[i]);
                HashCache hashCache = (HashCache)cache.getServerCache(joinInfo.getJoinIds()[i]);
                tuples[i] = hashCache.get(key);
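This is the probe side of the hash join fed by the cache-building loops on this page: for each join id, the join-key expressions are evaluated against the current row's ResultTuple with TupleUtil.getConcatenatedValue and looked up in the matching server-side HashCache; a scan-level limit is rejected up front.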

                // Build Map with evaluated hash key as key and row as value
                for (int i = 0; i < nRows; i++) {
                    int resultSize = (int)Bytes.readVLong(hashCacheByteArray, offset);
                    offset += WritableUtils.decodeVIntSize(hashCacheByteArray[offset]);
                    ImmutableBytesWritable value = new ImmutableBytesWritable(hashCacheByteArray, offset, resultSize);
                    Tuple result = new ResultTuple(new Result(value));
                    ImmutableBytesPtr key = new ImmutableBytesPtr(TupleUtil.getConcatenatedValue(result, onExpressions));
                    List<Tuple> tuples = hashCacheMap.get(key);
                    if (tuples == null) {
                        tuples = new ArrayList<Tuple>(1);
                        hashCacheMap.put(key, tuples);
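This appears to be a later variant of the first cache-building loop: the evaluated key is wrapped explicitly in an ImmutableBytesPtr and the per-key list is an ArrayList sized for a single element instead of a LinkedList; the ResultTuple construction itself is unchanged.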

