Package de.lmu.ifi.dbs.elki.logging.progress

Examples of de.lmu.ifi.dbs.elki.logging.progress.StepProgress
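All of the examples below follow the same pattern: a StepProgress is only created when verbose logging is enabled (otherwise the variable stays null), each phase of the algorithm is announced with beginStep(...), and setCompleted(...) marks the end of the last step. The following is a minimal sketch of that pattern only, not code from ELKI itself; the class name and the logger field are placeholders, with the Logging instance assumed to come from Logging.getLogger as ELKI classes typically do.

import de.lmu.ifi.dbs.elki.logging.Logging;
import de.lmu.ifi.dbs.elki.logging.progress.StepProgress;

public class StepProgressSketch {
  // Placeholder logger; ELKI classes usually hold a static Logging instance.
  private static final Logging logger = Logging.getLogger(StepProgressSketch.class);

  public void run() {
    // Allocate the progress object only when verbose output is enabled,
    // mirroring the null-guard used throughout the examples below.
    StepProgress stepprog = logger.isVerbose() ? new StepProgress("Sketch", 2) : null;

    if(stepprog != null) {
      stepprog.beginStep(1, "First phase.", logger);
    }
    // ... work for step 1 ...

    if(stepprog != null) {
      stepprog.beginStep(2, "Second phase.", logger);
    }
    // ... work for step 2 ...

    if(stepprog != null) {
      stepprog.setCompleted(logger);
    }
  }
}

Guarding every call on stepprog != null avoids both the allocation and the string formatting cost when verbose logging is off, which is why the real examples below repeat the same check before each step.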


   * calling {@code #doRunInTime(Database)} and adds a {@link LOFKNNListener} to
   * the preprocessors.
   */
  @Override
  public OutlierResult run(Relation<O> relation) {
    StepProgress stepprog = logger.isVerbose() ? new StepProgress("OnlineLOF", 3) : null;

    Pair<Pair<KNNQuery<O, D>, KNNQuery<O, D>>, Pair<RKNNQuery<O, D>, RKNNQuery<O, D>>> queries = getKNNAndRkNNQueries(relation, stepprog);
    KNNQuery<O, D> kNNRefer = queries.getFirst().getFirst();
    KNNQuery<O, D> kNNReach = queries.getFirst().getSecond();
    RKNNQuery<O, D> rkNNRefer = queries.getSecond().getFirst();


     * @param updates2 the ids of the updated neighborhood w.r.t. the
     *        reachability distance function
     * @param lofResult the result of the former LOF run
     */
    private void kNNsInserted(DBIDs insertions, DBIDs updates1, DBIDs updates2, LOFResult<O, D> lofResult) {
      StepProgress stepprog = logger.isVerbose() ? new StepProgress(3) : null;

      // recompute lrds
      if(stepprog != null) {
        stepprog.beginStep(1, "Recompute LRDs.", logger);
      }
      ArrayDBIDs lrd_ids = DBIDUtil.ensureArray(DBIDUtil.union(insertions, updates2));
      List<List<DistanceResultPair<D>>> reachDistRKNNs = lofResult.getRkNNReach().getRKNNForBulkDBIDs(lrd_ids, k);
      ArrayDBIDs affected_lrd_id_candidates = mergeIDs(reachDistRKNNs, lrd_ids);
      ArrayModifiableDBIDs affected_lrd_ids = DBIDUtil.newArray(affected_lrd_id_candidates.size());
      WritableDoubleDataStore new_lrds = computeLRDs(affected_lrd_id_candidates, lofResult.getKNNReach());
      for(DBID id : affected_lrd_id_candidates) {
        double new_lrd = new_lrds.doubleValue(id);
        double old_lrd = lofResult.getLrds().doubleValue(id);
        if(Double.isNaN(old_lrd) || old_lrd != new_lrd) {
          lofResult.getLrds().putDouble(id, new_lrd);
          affected_lrd_ids.add(id);
        }
      }

      // recompute lofs
      if(stepprog != null) {
        stepprog.beginStep(2, "Recompute LOFS.", logger);
      }
      List<List<DistanceResultPair<D>>> primDistRKNNs = lofResult.getRkNNRefer().getRKNNForBulkDBIDs(affected_lrd_ids, k);
      ArrayDBIDs affected_lof_ids = mergeIDs(primDistRKNNs, affected_lrd_ids, insertions, updates1);
      recomputeLOFs(affected_lof_ids, lofResult);

      // fire result changed
      if(stepprog != null) {
        stepprog.beginStep(3, "Inform listeners.", logger);
      }
      lofResult.getResult().getHierarchy().resultChanged(lofResult.getResult());

      if(stepprog != null) {
        stepprog.setCompleted(logger);
      }
    }

     * @param updates2 the ids of the updated neighborhood w.r.t. the
     *        reachability distance function
     * @param lofResult the result of the former LOF run
     */
    private void kNNsRemoved(DBIDs deletions, DBIDs updates1, DBIDs updates2, LOFResult<O, D> lofResult) {
      StepProgress stepprog = logger.isVerbose() ? new StepProgress(4) : null;

      // delete lrds and lofs
      if(stepprog != null) {
        stepprog.beginStep(1, "Delete old LRDs and LOFs.", logger);
      }
      for(DBID id : deletions) {
        lofResult.getLrds().delete(id);
        lofResult.getLofs().delete(id);
      }

      // recompute lrds
      if(stepprog != null) {
        stepprog.beginStep(2, "Recompute LRDs.", logger);
      }
      ArrayDBIDs lrd_ids = DBIDUtil.ensureArray(updates2);
      List<List<DistanceResultPair<D>>> reachDistRKNNs = lofResult.getRkNNReach().getRKNNForBulkDBIDs(lrd_ids, k);
      ArrayDBIDs affected_lrd_id_candidates = mergeIDs(reachDistRKNNs, lrd_ids);
      ArrayModifiableDBIDs affected_lrd_ids = DBIDUtil.newArray(affected_lrd_id_candidates.size());
      WritableDoubleDataStore new_lrds = computeLRDs(affected_lrd_id_candidates, lofResult.getKNNReach());
      for(DBID id : affected_lrd_id_candidates) {
        double new_lrd = new_lrds.doubleValue(id);
        double old_lrd = lofResult.getLrds().doubleValue(id);
        if(old_lrd != new_lrd) {
          lofResult.getLrds().putDouble(id, new_lrd);
          affected_lrd_ids.add(id);
        }
      }

      // recompute lofs
      if(stepprog != null) {
        stepprog.beginStep(3, "Recompute LOFS.", logger);
      }
      List<List<DistanceResultPair<D>>> primDistRKNNs = lofResult.getRkNNRefer().getRKNNForBulkDBIDs(affected_lrd_ids, k);
      ArrayDBIDs affected_lof_ids = mergeIDs(primDistRKNNs, affected_lrd_ids, updates1);
      recomputeLOFs(affected_lof_ids, lofResult);

      // fire result changed
      if(stepprog != null) {
        stepprog.beginStep(4, "Inform listeners.", logger);
      }
      lofResult.getResult().getHierarchy().resultChanged(lofResult.getResult());

      if(stepprog != null) {
        stepprog.setCompleted(logger);
      }
    }

   * @return Clustering result
   */
  public Clustering<CorrelationModel<V>> run(Relation<V> relation) throws IllegalStateException {
    final int dimensionality = DatabaseUtil.dimensionality(relation);

    StepProgress stepprog = logger.isVerbose() ? new StepProgress(3) : null;

    // run COPAC
    if(stepprog != null) {
      stepprog.beginStep(1, "Preprocessing local correlation dimensionalities and partitioning data", logger);
    }
    Clustering<Model> copacResult = copacAlgorithm.run(relation);

    DistanceQuery<V, IntegerDistance> query = copacAlgorithm.getPartitionDistanceQuery();

    // extract correlation clusters
    if(stepprog != null) {
      stepprog.beginStep(2, "Extract correlation clusters", logger);
    }
    SortedMap<Integer, List<Cluster<CorrelationModel<V>>>> clusterMap = extractCorrelationClusters(copacResult, relation, dimensionality);
    if(logger.isDebugging()) {
      StringBuffer msg = new StringBuffer("Step 2: Extract correlation clusters...");
      for(Integer corrDim : clusterMap.keySet()) {
        List<Cluster<CorrelationModel<V>>> correlationClusters = clusterMap.get(corrDim);
        msg.append("\n\ncorrDim ").append(corrDim);
        for(Cluster<CorrelationModel<V>> cluster : correlationClusters) {
          msg.append("\n  cluster ").append(cluster).append(", ids: ").append(cluster.getIDs().size());
          // .append(", level: ").append(cluster.getLevel()).append(", index: ").append(cluster.getLevelIndex());
          // msg.append("\n  basis " +
          // cluster.getPCA().getWeakEigenvectors().toString("    ", NF) +
          // "  ids " + cluster.getIDs().size());
        }
      }
      logger.debugFine(msg.toString());
    }
    if(logger.isVerbose()) {
      int clusters = 0;
      for(List<Cluster<CorrelationModel<V>>> correlationClusters : clusterMap.values()) {
        clusters += correlationClusters.size();
      }
      logger.verbose(clusters + " clusters extracted.");
    }

    // build hierarchy
    if(stepprog != null) {
      stepprog.beginStep(3, "Building hierarchy", logger);
    }
    buildHierarchy(clusterMap, query);
    if(logger.isDebugging()) {
      StringBuffer msg = new StringBuffer("Step 3: Build hierarchy");
      for(Integer corrDim : clusterMap.keySet()) {
        List<Cluster<CorrelationModel<V>>> correlationClusters = clusterMap.get(corrDim);
        for(Cluster<CorrelationModel<V>> cluster : correlationClusters) {
          msg.append("\n  cluster ").append(cluster).append(", ids: ").append(cluster.getIDs().size());
          // .append(", level: ").append(cluster.getLevel()).append(", index: ").append(cluster.getLevelIndex());
          for(int i = 0; i < cluster.getParents().size(); i++) {
            msg.append("\n   parent ").append(cluster.getParents().get(i));
          }
          for(int i = 0; i < cluster.numChildren(); i++) {
            msg.append("\n   child ").append(cluster.getChildren().get(i));
          }
        }
      }
      logger.debugFine(msg.toString());
    }
    if(stepprog != null) {
      stepprog.setCompleted(logger);
    }

    Clustering<CorrelationModel<V>> result = new Clustering<CorrelationModel<V>>("ERiC clustering", "eric-clustering");
    for(Cluster<CorrelationModel<V>> rc : clusterMap.get(clusterMap.lastKey())) {
      result.addCluster(rc);
