Package mil.nga.giat.geowave.store.index

Examples of mil.nga.giat.geowave.store.index.Index


    // This is an adapter, that is needed to describe how to persist the
    // data type passed
    final FeatureDataAdapter adapter = createDataAdapter(point);

    // This describes how to index the data
    final Index index = createSpatialIndex();

    // features require a featureID - this should be unique as it's a
    // foreign key on the feature
    // (i.e. sending in a new feature with the same feature id will
    // overwrite the existing feature)
View Full Code Here


    runtest();
  }
 
  private void runtest() {

    final Index index = IndexType.SPATIAL_VECTOR.createDefaultIndex();
    final WritableDataAdapter<TestGeometry> adapter = new TestGeometryAdapter();

    final Geometry testGeoFilter = factory.createPolygon(new Coordinate[] {
      new Coordinate(
          24,
View Full Code Here

    if (accumuloOptions.getVisibility() != null) {
      conf.set(
          GLOBAL_VISIBILITY_KEY,
          accumuloOptions.getVisibility());
    }
    final Index primaryIndex = accumuloOptions.getIndex(parentPlugin.getSupportedIndices());
    if (primaryIndex != null) {
      conf.set(
          PRIMARY_INDEX_ID_KEY,
          StringUtils.stringFromBinary(primaryIndex.getId().getBytes()));
    }
    final Job job = new Job(
        conf,
        getJobName());
View Full Code Here

            testEncoding.getDataId(),
            null,
            1,
            testEncoding.getCommonData(),
            testEncoding.getAdapterExtendedData()),
        new Index(
            null,
            testIndexModel));

    Assert.assertEquals(
        new String(
View Full Code Here

    // all queries will use the same instance of the dedupe filter for
    // client side filtering because the filter needs to be applied across
    // indices
    final MultiIndexDedupeFilter clientDedupeFilter = new MultiIndexDedupeFilter();
    while (indices.hasNext()) {
      final Index index = indices.next();
      final AccumuloConstraintsQuery accumuloQuery;
      if (query == null) {
        accumuloQuery = new AccumuloConstraintsQuery(
            adapterIds,
            index,
            clientDedupeFilter);
      }
      else if (query.isSupported(index)) {
        // construct the query
        accumuloQuery = new AccumuloConstraintsQuery(
            adapterIds,
            index,
            query.getIndexConstraints(index.getIndexStrategy()),
            query.createFilters(index.getIndexModel()),
            clientDedupeFilter,
            authorizations);
      }
      else {
        continue;
View Full Code Here

   * Main method to execute the MapReduce analytic.
   */
  @SuppressWarnings("deprecation")
  public int runJob()
      throws Exception {
    final Index spatialIndex = IndexType.SPATIAL_VECTOR.createDefaultIndex();

    final Configuration conf = super.getConf();

    final BasicAccumuloOperations ops = new BasicAccumuloOperations(
        zookeeper,
        instance,
        user,
        password,
        namespace);
    final AdapterStore adapterStore = new AccumuloAdapterStore(
        ops);
    final DataAdapter<?> adapter = adapterStore.getAdapter(new ByteArrayId(
        StringUtils.stringToBinary(featureType)));
    conf.set(
        GaussianCellMapper.DATA_ADAPTER_KEY,
        ByteArrayUtils.byteArrayToString(PersistenceUtils.toBinary(adapter)));
    conf.setInt(
        MAX_LEVEL_KEY,
        maxLevel);
    conf.setInt(
        MIN_LEVEL_KEY,
        minLevel);
    conf.set(
        AccumuloKDEReducer.STATS_NAME_KEY,
        statsName);
    if (cqlFilter != null) {
      conf.set(
          GaussianCellMapper.CQL_FILTER_KEY,
          cqlFilter);
    }
    preJob1Setup(conf);
    final Job job = new Job(
        conf);

    job.setJarByClass(this.getClass());

    job.setJobName(getJob1Name());

    job.setMapperClass(getJob1Mapper());
    job.setCombinerClass(CellSummationCombiner.class);
    job.setReducerClass(getJob1Reducer());
    job.setMapOutputKeyClass(LongWritable.class);
    job.setMapOutputValueClass(DoubleWritable.class);
    job.setOutputKeyClass(DoubleWritable.class);
    job.setOutputValueClass(LongWritable.class);

    job.setInputFormatClass(AccumuloInputFormat.class);
    job.setOutputFormatClass(SequenceFileOutputFormat.class);
    job.setNumReduceTasks(8);
    if (cqlFilter != null) {
      final Filter filter = ECQL.toFilter(cqlFilter);
      final Geometry bbox = (Geometry) filter.accept(
          ExtractGeometryFilterVisitor.GEOMETRY_VISITOR,
          null);
      if ((bbox != null) && !bbox.equals(GeometryUtils.infinity())) {
        final Constraints c = GeometryUtils.basicConstraintsFromGeometry(bbox);
        final List<ByteArrayRange> ranges = spatialIndex.getIndexStrategy().getQueryRanges(
            c.getIndexConstraints(spatialIndex.getIndexStrategy()));

        InputFormatBase.setRanges(
            job,
            AccumuloUtils.byteArrayRangesToAccumuloRanges(ranges));
      }
      conf.set(
          GaussianCellMapper.CQL_FILTER_KEY,
          cqlFilter);
    }

    InputFormatBase.setConnectorInfo(
        job,
        user,
        new PasswordToken(
            password.getBytes()));
    InputFormatBase.setInputTableName(
        job,
        AccumuloUtils.getQualifiedTableName(
            namespace,
            StringUtils.stringFromBinary(spatialIndex.getId().getBytes())));
    InputFormatBase.setScanAuthorizations(
        job,
        new Authorizations());

    InputFormatBase.setZooKeeperInstance(
        job,
        instance,
        zookeeper);

    // we have to at least use a whole row iterator
    final IteratorSetting iteratorSettings = new IteratorSetting(
        10,
        "GEOWAVE_WHOLE_ROW_ITERATOR",
        WholeRowIterator.class);
    InputFormatBase.addIterator(
        job,
        iteratorSettings);

    final FileSystem fs = FileSystem.get(conf);
    fs.delete(
        new Path(
            "/tmp/" + namespace + "_stats_" + minLevel + "_" + maxLevel + "_" + statsName),
        true);
    FileOutputFormat.setOutputPath(
        job,
        new Path(
            "/tmp/" + namespace + "_stats_" + minLevel + "_" + maxLevel + "_" + statsName + "/basic"));

    final boolean job1Success = job.waitForCompletion(true);
    boolean job2Success = false;
    boolean postJob2Success = false;
    // Linear MapReduce job chaining
    if (job1Success) {
      final String statsNamespace = namespace + "_stats";
      final String tableName = AccumuloUtils.getQualifiedTableName(
          statsNamespace,
          StringUtils.stringFromBinary(spatialIndex.getId().getBytes()));

      conf.set(
          TABLE_NAME,
          tableName);
      setupEntriesPerLevel(
View Full Code Here

    // query the indices that are supported for this query object, and these
    // data adapter Ids

    try (CloseableIterator<Index> indices = indexStore.getIndices()) {
      while (indices.hasNext()) {
        final Index index = indices.next();
        results.add(this.query(
            adapter,
            query,
            index,
            filter,
View Full Code Here

    // query the indices that are supported for this query object, and these
    // data adapter Ids
    try (final CloseableIterator<Index> indices = indexStore.getIndices()) {
      final List<CloseableIterator<SimpleFeature>> results = new ArrayList<CloseableIterator<SimpleFeature>>();
      while (indices.hasNext()) {
        final Index index = indices.next();
        final DistributedRenderQuery accumuloQuery;
        if (query == null) {
          accumuloQuery = new DistributedRenderQuery(
              Arrays.asList(new ByteArrayId[] {
                adapter.getAdapterId()
              }),
              index,
              filter,
              adapter,
              distributedRenderer,
              authorizations);
        }
        else if (query.isSupported(index)) {
          // construct the query
          accumuloQuery = new DistributedRenderQuery(
              Arrays.asList(new ByteArrayId[] {
                adapter.getAdapterId()
              }),
              index,
              query.getIndexConstraints(index.getIndexStrategy()),
              query.createFilters(index.getIndexModel()),
              filter,
              adapter,
              distributedRenderer,
              authorizations);
        }
View Full Code Here

    // query the indices that are supported for this query object, and these
    // data adapter Ids
    try (final CloseableIterator<Index> indices = indexStore.getIndices()) {
      final List<CloseableIterator<SimpleFeature>> results = new ArrayList<CloseableIterator<SimpleFeature>>();
      while (indices.hasNext()) {
        final Index index = indices.next();
        final SpatialDecimationQuery accumuloQuery;
        if ((query == null)) {
          accumuloQuery = new SpatialDecimationQuery(
              Arrays.asList(new ByteArrayId[] {
                adapter.getAdapterId()
              }),
              index,
              width,
              height,
              pixelSize,
              filter,
              adapter,
              envelope,
              authorizations);
        }
        else if (query.isSupported(index)) {
          // construct the query
          accumuloQuery = new SpatialDecimationQuery(
              Arrays.asList(new ByteArrayId[] {
                adapter.getAdapterId()
              }),
              index,
              query.getIndexConstraints(index.getIndexStrategy()),
              query.createFilters(index.getIndexModel()),
              width,
              height,
              pixelSize,
              filter,
              adapter,
View Full Code Here

          return false;
        }
      }
      final SimpleFeature feature = dataAdapter.decode(
          encoding,
          new Index(
              null, // because we know the feature data adapter
                  // doesn't use the numeric index strategy
                  // and only the common index model to decode
                  // the simple feature, we pass along a null
                  // strategy to eliminate the necessity to
View Full Code Here

TOP

Related Classes of mil.nga.giat.geowave.store.index.Index

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.