Package com.facebook.presto.spi

Examples of com.facebook.presto.spi.ColumnHandle


                    }

                    LinkedHashMap<String, String> keys = Warehouse.makeSpecFromName(partitionId);
                    ImmutableMap.Builder<ColumnHandle, Comparable<?>> builder = ImmutableMap.builder();
                    for (Entry<String, String> entry : keys.entrySet()) {
                        ColumnHandle columnHandle = columnsByName.get(entry.getKey());
                        checkArgument(columnHandle != null, "Invalid partition key %s in partition %s", entry.getKey(), partitionId);
                        checkArgument(columnHandle instanceof HiveColumnHandle, "columnHandle is not an instance of HiveColumnHandle");
                        HiveColumnHandle hiveColumnHandle = (HiveColumnHandle) columnHandle;

                        String value = entry.getValue();
View Full Code Here
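
The snippet above resolves each Hive partition key back to its ColumnHandle before downcasting to HiveColumnHandle. In these examples ColumnHandle is opaque to the engine: each connector supplies its own implementation, and the engine only stores, compares, and passes handles around. A minimal hypothetical implementation (the class and field names are illustrative, not from the Presto codebase, and assume ColumnHandle is the no-method marker interface these snippets treat it as) might look like this:

import com.facebook.presto.spi.ColumnHandle;

import java.util.Objects;

// Hypothetical connector-specific handle. Correct equals/hashCode matter because
// handles are used as map keys throughout these examples
// (e.g. ImmutableMap.Builder<ColumnHandle, Comparable<?>> above).
public class ExampleColumnHandle
        implements ColumnHandle
{
    private final String columnName;
    private final int ordinalPosition;

    public ExampleColumnHandle(String columnName, int ordinalPosition)
    {
        this.columnName = Objects.requireNonNull(columnName, "columnName is null");
        this.ordinalPosition = ordinalPosition;
    }

    public String getColumnName()
    {
        return columnName;
    }

    public int getOrdinalPosition()
    {
        return ordinalPosition;
    }

    @Override
    public boolean equals(Object obj)
    {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof ExampleColumnHandle)) {
            return false;
        }
        ExampleColumnHandle other = (ExampleColumnHandle) obj;
        return ordinalPosition == other.ordinalPosition
                && columnName.equals(other.columnName);
    }

    @Override
    public int hashCode()
    {
        return Objects.hash(columnName, ordinalPosition);
    }
}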


    }

    private StageExecutionPlan createTableScanPlan(String planId, MetadataManager metadata, int splitCount)
    {
        TableHandle tableHandle = metadata.getTableHandle(new QualifiedTableName("default", "default", DualMetadata.NAME)).get();
        ColumnHandle columnHandle = metadata.getColumnHandle(tableHandle, DualMetadata.COLUMN_NAME).get();
        Symbol symbol = new Symbol(DualMetadata.COLUMN_NAME);

        // table scan with splitCount splits
        Split split = new DualSplit(HostAddress.fromString("127.0.0.1"));
        PlanNodeId tableScanNodeId = new PlanNodeId(planId);
View Full Code Here
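
Besides the single-column lookup used in this test (metadata.getColumnHandle(tableHandle, DualMetadata.COLUMN_NAME)), the metadata API in these snippets also exposes getColumnHandles, which returns every handle for a table keyed by column name (it appears in the table-writer example further down). A short sketch of listing all handles for the same table, assuming the metadata, MetadataManager and QualifiedTableName types used above:

// Sketch only: resolve all ColumnHandles of a table by name, reusing the
// getTableHandle / getColumnHandles calls shown in the surrounding snippets.
TableHandle tableHandle = metadata.getTableHandle(
        new QualifiedTableName("default", "default", DualMetadata.NAME)).get();
Map<String, ColumnHandle> handlesByName = metadata.getColumnHandles(tableHandle);
for (Map.Entry<String, ColumnHandle> entry : handlesByName.entrySet()) {
    System.out.println(entry.getKey() + " -> " + entry.getValue());
}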

        ImmutableList.Builder<BlockIterable> sourcesBuilder = ImmutableList.builder();
        ColumnFileHandle.Builder builder = ColumnFileHandle.builder(shardUuid);

        for (Map.Entry<ColumnHandle, File> entry : columnFileHandle.getFiles().entrySet()) {
            File file = entry.getValue();
            ColumnHandle columnHandle = entry.getKey();

            if (file.length() > 0) {
                Slice slice = mappedFileCache.getUnchecked(file.getAbsoluteFile());
                checkState(file.length() == slice.length(), "File %s, length %s was mapped to Slice length %s", file.getAbsolutePath(), file.length(), slice.length());
                // Compute optimal encoding from stats
View Full Code Here

                    throws Exception
            {
                StorageManagerDao dao = handle.attach(StorageManagerDao.class);

                for (Map.Entry<ColumnHandle, File> entry : columnFileHandle.getFiles().entrySet()) {
                    ColumnHandle columnHandle = entry.getKey();
                    File file = entry.getValue();

                    checkState(columnHandle instanceof NativeColumnHandle, "Can only import in a native column");
                    long columnId = ((NativeColumnHandle) columnHandle).getColumnId();
                    String filename = file.getName();
View Full Code Here
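
The checkState-then-cast pattern above is how connector code recovers its concrete handle type (NativeColumnHandle here) from the opaque ColumnHandle. The same guard extracted into a reusable helper, as a hedged sketch that uses the hypothetical ExampleColumnHandle class shown earlier on this page:

import static com.google.common.base.Preconditions.checkState;

// Sketch only: guard-and-cast helper mirroring the instanceof checks in the
// snippets above; ExampleColumnHandle is hypothetical, not a Presto class.
private static ExampleColumnHandle toExampleHandle(ColumnHandle columnHandle)
{
    checkState(columnHandle instanceof ExampleColumnHandle,
            "expected an ExampleColumnHandle but got %s", columnHandle.getClass().getName());
    return (ExampleColumnHandle) columnHandle;
}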

            for (ColumnMetadata column : sourceTableMetadata.getColumns()) {
                Field field = Field.newQualified(sourceTable.asQualifiedName(), Optional.of(column.getName()), Type.fromRaw(column.getType()));
                Symbol symbol = symbolAllocator.newSymbol(field);

                inputColumnsBuilder.put(symbol, sourceTableColumns.get(column.getName()));
                ColumnHandle targetColumnHandle = targetTableColumns.get(column.getName());
                fields.add(field);
                columnHandleBuilder.add(targetColumnHandle);
                outputSymbolsBuilder.add(symbol);
            }

            ImmutableList<Symbol> outputSymbols = outputSymbolsBuilder.build();
            plan = new RelationPlan(new TableScanNode(idAllocator.getNextId(), sourceTableHandle, outputSymbols, inputColumnsBuilder.build(), null, Optional.<GeneratedPartitions>absent()), new TupleDescriptor(fields.build()), outputSymbols);

            targetColumnHandles = columnHandleBuilder.build();
        }
        else {
            RelationPlanner planner = new RelationPlanner(analysis, symbolAllocator, idAllocator, metadata, session);
            plan = planner.process(analysis.getQuery(), null);

            // TODO: create table and periodic import in pre-execution step, not here

            // Create the destination table
            ImmutableList.Builder<ColumnMetadata> columns = ImmutableList.builder();
            for (int i = 0; i < plan.getDescriptor().getFields().size(); i++) {
                Field field = plan.getDescriptor().getFields().get(i);
                String name = field.getName().or("_field" + i);
                ColumnMetadata columnMetadata = new ColumnMetadata(name, field.getType().getColumnType(), i, false);
                columns.add(columnMetadata);
            }

            TableMetadata tableMetadata = createTableMetadata(destination, columns.build());
            targetTable = metadata.createTable(destination.getCatalogName(), tableMetadata);

            // get the column handles for the destination table
            Map<String, ColumnHandle> columnHandleIndex = metadata.getColumnHandles(targetTable);
            ImmutableList.Builder<ColumnHandle> columnHandleBuilder = ImmutableList.builder();
            for (ColumnMetadata column : tableMetadata.getColumns()) {
                columnHandleBuilder.add(columnHandleIndex.get(column.getName()));
            }
            targetColumnHandles = columnHandleBuilder.build();

            // find source table (TODO: do this in analyzer)
            QueryBody queryBody = analysis.getQuery().getQueryBody();
            checkState(queryBody instanceof QuerySpecification, "Query is not a simple select statement");
            List<Relation> relations = ((QuerySpecification) queryBody).getFrom();
            checkState(relations.size() == 1, "Query has more than one source table");
            Relation relation = Iterables.getOnlyElement(relations);
            checkState(relation instanceof Table, "FROM clause is not a simple table name");
            QualifiedTableName sourceTable = MetadataUtil.createQualifiedTableName(session, ((Table) relation).getName());

            // create source table and optional import information
            storageManager.insertTableSource(((NativeTableHandle) targetTable), sourceTable);

            // if a refresh is present, create a periodic import for this table
            if (analysis.getRefreshInterval().isPresent()) {
                PeriodicImportJob job = PeriodicImportJob.createJob(sourceTable, destination, analysis.getRefreshInterval().get());
                periodicImportManager.insertJob(job);
            }
        }

        // compute input symbol <-> column mappings
        ImmutableMap.Builder<Symbol, ColumnHandle> mappings = ImmutableMap.builder();

        for (int i = 0; i < targetColumnHandles.size(); i++) {
            ColumnHandle column = targetColumnHandles.get(i);
            Symbol symbol = plan.getSymbol(i);
            mappings.put(symbol, column);
        }

        // create writer node
View Full Code Here
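
After creating the destination table, the plan above pairs the i-th target ColumnHandle with the i-th output Symbol to build the writer's input mapping. The same zip step extracted into a small helper, as a hedged sketch (the method name is illustrative, and the symbol list stands in for plan.getSymbol(i)):

// Sketch only: positionally pair output symbols with target column handles,
// as the mapping loop above does. Assumes both lists have the same length.
private static Map<Symbol, ColumnHandle> mapSymbolsToColumns(List<Symbol> symbols, List<ColumnHandle> columnHandles)
{
    checkState(symbols.size() == columnHandles.size(), "symbol / column handle count mismatch");
    ImmutableMap.Builder<Symbol, ColumnHandle> mappings = ImmutableMap.builder();
    for (int i = 0; i < columnHandles.size(); i++) {
        mappings.put(symbols.get(i), columnHandles.get(i));
    }
    return mappings.build();
}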

        Map<ColumnHandle, String> columnHandles = ImmutableBiMap.copyOf(metadata.getColumnHandles(tableHandle.get())).inverse();
        PartitionResult partitionResult = splitManager.getPartitions(tableHandle.get(), Optional.<TupleDomain>absent());

        for (Partition partition : partitionResult.getPartitions()) {
            for (Entry<ColumnHandle, Comparable<?>> entry : partition.getTupleDomain().extractFixedValues().entrySet()) {
                ColumnHandle columnHandle = entry.getKey();
                String columnName = columnHandles.get(columnHandle);
                String value = entry.getValue() != null ? String.valueOf(entry.getValue()) : null;
                table.add(
                        catalogName,
                        tableName.getSchemaName(),
View Full Code Here
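
The snippet above has to report each fixed partition value by column name, but Partition.getTupleDomain().extractFixedValues() keys the values by ColumnHandle, so the name-to-handle map from getColumnHandles is inverted once with an ImmutableBiMap. That inversion in isolation, as a hedged sketch reusing the same calls:

// Sketch only: invert the name -> handle map so a ColumnHandle can be resolved
// back to its column name. ImmutableBiMap requires handle values to be unique.
Map<String, ColumnHandle> handlesByName = metadata.getColumnHandles(tableHandle.get());
Map<ColumnHandle, String> namesByHandle = ImmutableBiMap.copyOf(handlesByName).inverse();
// namesByHandle.get(columnHandle) now recovers the column name for any handle
// appearing in a partition's fixed values.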

            ImmutableMap.Builder<Symbol, ColumnHandle> newAssignmentsBuilder = ImmutableMap.builder();
            for (Map.Entry<Symbol, ColumnHandle> assignmentEntry : assignments.entrySet()) {
                ColumnMetadata originalColumn = metadata.getColumnMetadata(tableHandle, assignmentEntry.getValue());

                ColumnHandle aliasedColumnHandle = lookupColumns.get(originalColumn.getName());
                checkState(aliasedColumnHandle != null, "no matching column for original column %s found!", originalColumn);
                newAssignmentsBuilder.put(assignmentEntry.getKey(), aliasedColumnHandle);
            }

            return new TableScanNode(node.getId(), aliasTableHandle.get(), node.getOutputSymbols(), newAssignmentsBuilder.build(), node.getOriginalConstraint(), node.getGeneratedPartitions());
View Full Code Here
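
This rewrite re-points a table scan at an aliased table: each original ColumnHandle is resolved back to a column name through getColumnMetadata, and the name is then looked up in the alias table's handle map. Because handles are opaque, that round trip through metadata is the only way to match handles across tables; a hedged two-line sketch of the name resolution, assuming the same getColumnMetadata(TableHandle, ColumnHandle) call used above:

// Sketch only: recover a human-readable column name from an opaque handle,
// then find the corresponding handle in the alias table's name -> handle map.
String originalName = metadata.getColumnMetadata(tableHandle, columnHandle).getName();
ColumnHandle aliasedColumnHandle = lookupColumns.get(originalName);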

                    }

                    LinkedHashMap<String, String> keys = Warehouse.makeSpecFromName(partitionId);
                    ImmutableMap.Builder<ColumnHandle, Object> builder = ImmutableMap.builder();
                    for (Entry<String, String> entry : keys.entrySet()) {
                        ColumnHandle columnHandle = columnsByName.get(entry.getKey());
                        checkArgument(columnHandle != null, "Invalid partition key %s in partition %s", entry.getKey(), partitionId);
                        checkArgument(columnHandle instanceof HiveColumnHandle, "columnHandle is not an instance of HiveColumnHandle");
                        HiveColumnHandle hiveColumnHandle = (HiveColumnHandle) columnHandle;

                        String value = entry.getValue();
View Full Code Here

            throws Exception
    {
        TableHandle tableHandle = getTableHandle(tableOfflinePartition);
        assertNotNull(tableHandle);

        ColumnHandle dsColumn = metadata.getColumnHandle(tableHandle, "ds");
        assertNotNull(dsColumn);

        TupleDomain tupleDomain = TupleDomain.withColumnDomains(ImmutableMap.<ColumnHandle, Domain>of(dsColumn, Domain.singleValue("2012-12-30")));
        PartitionResult partitionResult = splitManager.getPartitions(tableHandle, tupleDomain);
        for (Partition partition : partitionResult.getPartitions()) {
View Full Code Here
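
The test above narrows getPartitions with a TupleDomain that pins the "ds" partition column handle to a single day. A hedged follow-up check, reusing extractFixedValues() from the earlier partition snippet, to confirm every returned partition actually fixes ds to that value:

// Sketch only: assert that each surviving partition fixes the ds ColumnHandle
// to the requested value; extractFixedValues() is shown in an earlier snippet.
for (Partition partition : partitionResult.getPartitions()) {
    Object dsValue = partition.getTupleDomain().extractFixedValues().get(dsColumn);
    assertEquals(dsValue, "2012-12-30");
}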
