Package com.carrotsearch.hppc

Examples of com.carrotsearch.hppc.IntOpenHashSet
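IntOpenHashSet is HPPC's open-addressing hash set for primitive int values. The snippets below use it to collect node IDs, deduplicate field indices, and track visited nodes without boxing. As a quick orientation, here is a minimal self-contained sketch (the demo class is illustrative, not taken from any of the projects below) of the operations that recur throughout the examples:

  import java.util.Arrays;

  import com.carrotsearch.hppc.IntOpenHashSet;
  import com.carrotsearch.hppc.cursors.IntCursor;

  public class IntOpenHashSetDemo
  {
    public static void main(String[] args)
    {
      IntOpenHashSet set = new IntOpenHashSet();

      set.add(3);
      set.add(7);
      set.add(3);                                  // duplicate; the set still holds two values

      System.out.println(set.size());              // 2
      System.out.println(set.contains(7));         // true

      // Iteration yields IntCursor objects rather than boxed Integers.
      for (IntCursor cur : set)
        System.out.println(cur.value);

      // toArray() copies the values in no particular order, so sort explicitly when order matters.
      int[] values = set.toArray();
      Arrays.sort(values);
      System.out.println(Arrays.toString(values)); // [3, 7]
    }
  }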


      getSubNodeCollectionAux(col, arc.getNode());
  }
 
  public IntOpenHashSet getSubIdSet()
  {
    IntOpenHashSet set = new IntOpenHashSet();
   
    getSubIdSetAux(set, this);
    return set;
  }
View Full Code Here
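The recursive helper getSubIdSetAux is not included in the excerpt above. A hedged sketch of what such a helper could look like, assuming hypothetical getID() and getDependents() accessors on DEPNode (the actual class may expose these differently):

  // Hypothetical sketch only: collects the ID of the given node and of every node
  // in its subtree into the set.  getID() and getDependents() are assumed accessors,
  // not necessarily the real API of the node class excerpted above.
  private void getSubIdSetAux(IntOpenHashSet set, DEPNode node)
  {
    set.add(node.getID());

    for (DEPNode dependent : node.getDependents())
      getSubIdSetAux(set, dependent);
  }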


  /**
   * The array is sorted in ascending order.
   * @return an array of IDs from the subtree of this node, including the ID of this node.
   */
  public int[] getSubIdArray()
  {
    IntOpenHashSet set = getSubIdSet();
    int[] list = set.toArray();
    Arrays.sort(list);
   
    return list;
  }
View Full Code Here

   
    l_branches = Lists.newArrayList();
    l_states   = Lists.newArrayList();
    l_2ndHeads = Lists.newArrayList();
    n_2ndPos   = new double[t_size];
    s_reduce   = new IntOpenHashSet();
    
    int i; for (i=0; i<t_size; i++)
      l_2ndHeads.add(new ArrayList<DEPHead>());
  }
View Full Code Here

   

    final ErrorCollector collector = new ErrorCollectorImpl();
    final List<TransferPair> transfers = Lists.newArrayList();

    final ClassGenerator<Projector> cg = CodeGenerator.getRoot(Projector.TEMPLATE_DEFINITION, context.getFunctionRegistry());

    IntOpenHashSet transferFieldIds = new IntOpenHashSet();

    boolean isAnyWildcard = false;
  
    ClassifierResult result = new ClassifierResult();
    boolean classify = isClassificationNeeded(exprs);
   
    for(int i = 0; i < exprs.size(); i++){
      final NamedExpression namedExpression = exprs.get(i);
      result.clear();
     
      if (classify && namedExpression.getExpr() instanceof SchemaPath) {
        classifyExpr(namedExpression, incoming, result);
    
        if (result.isStar) {
          isAnyWildcard = true;
          Integer value = result.prefixMap.get(result.prefix);
          if (value != null && value.intValue() == 1) {
            int k = 0;
            for(VectorWrapper<?> wrapper : incoming) {
              ValueVector vvIn = wrapper.getValueVector();
              SchemaPath originalPath = vvIn.getField().getPath();
              if (k > result.outputNames.size()-1) {
                assert false;
              }
              String name = result.outputNames.get(k++); // get the renamed column names
              if (name == EMPTY_STRING) continue;
              FieldReference ref = new FieldReference(name);
              TransferPair tp = wrapper.getValueVector().getTransferPair(ref);
              transfers.add(tp);
              container.add(tp.getTo());       
            }
          } else if (value != null && value.intValue() > 1) { // subsequent wildcards should do a copy of incoming valuevectors
            int k = 0;
            for(VectorWrapper<?> wrapper : incoming) {
              ValueVector vvIn = wrapper.getValueVector();
              SchemaPath originalPath = vvIn.getField().getPath();
              if (k > result.outputNames.size()-1) {
                assert false;
              }
              String name = result.outputNames.get(k++); // get the renamed column names
              if (name == EMPTY_STRING) continue;

              final LogicalExpression expr = ExpressionTreeMaterializer.materialize(originalPath, incoming, collector, context.getFunctionRegistry() );
              if(collector.hasErrors()){
                throw new SchemaChangeException(String.format("Failure while trying to materialize incoming schema.  Errors:\n %s.", collector.toErrorString()));
              }             

              MaterializedField outputField = MaterializedField.create(name, expr.getMajorType());
              ValueVector vv = TypeHelper.getNewVector(outputField, oContext.getAllocator());
              allocationVectors.add(vv);
              TypedFieldId fid = container.add(vv);
              ValueVectorWriteExpression write = new ValueVectorWriteExpression(fid, expr, true);
              HoldingContainer hc = cg.addExpr(write);

              cg.getEvalBlock()._if(hc.getValue().eq(JExpr.lit(0)))._then()._return(JExpr.FALSE);
            }
          }
          continue;
        }
      }

      String outputName = getRef(namedExpression).getRootSegment().getPath();
      if (result != null && result.outputNames != null && result.outputNames.size() > 0) {
        if (result.outputNames.get(0) == EMPTY_STRING) continue;
        outputName = result.outputNames.get(0);
      }
     
      final LogicalExpression expr = ExpressionTreeMaterializer.materialize(namedExpression.getExpr(), incoming, collector, context.getFunctionRegistry(), true);
      final MaterializedField outputField = MaterializedField.create(outputName, expr.getMajorType());
      if(collector.hasErrors()){
        throw new SchemaChangeException(String.format("Failure while trying to materialize incoming schema.  Errors:\n %s.", collector.toErrorString()));
      }

      // add value vector to transfer if direct reference and this is allowed, otherwise, add to evaluation stack.
      if(expr instanceof ValueVectorReadExpression && incoming.getSchema().getSelectionVectorMode() == SelectionVectorMode.NONE
          && !((ValueVectorReadExpression) expr).hasReadPath()
          && !isAnyWildcard
          && !transferFieldIds.contains(((ValueVectorReadExpression) expr).getFieldId().getFieldIds()[0])
        ) {

        ValueVectorReadExpression vectorRead = (ValueVectorReadExpression) expr;
        TypedFieldId id = vectorRead.getFieldId();
        ValueVector vvIn = incoming.getValueAccessorById(id.getIntermediateClass(), id.getFieldIds()).getValueVector();
        Preconditions.checkNotNull(incoming);

        TransferPair tp = vvIn.getTransferPair(getRef(namedExpression));
        transfers.add(tp);
        container.add(tp.getTo());
        transferFieldIds.add(vectorRead.getFieldId().getFieldIds()[0]);
        logger.debug("Added transfer for project expression.");
      } else if (expr instanceof DrillFuncHolderExpr &&
          ((DrillFuncHolderExpr) expr).isComplexWriterFuncHolder())  {
        // Need to process ComplexWriter function evaluation.
        // Lazy initialization of the list of complex writers, if not done yet.
View Full Code Here
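In the projection setup above, transferFieldIds acts purely as a seen-set: a direct transfer is created only when the expression's leading field ID has not been recorded yet (note the contains() check followed by add() around the TransferPair creation). Stripped of the Drill-specific plumbing, the idiom reduces to the following hedged sketch, where the int[] input and the returned array of first occurrences are illustrative stand-ins:

  import com.carrotsearch.hppc.IntArrayList;
  import com.carrotsearch.hppc.IntOpenHashSet;

  class TransferDedupSketch
  {
    // Keep only the first occurrence of each field id, mirroring the
    // !transferFieldIds.contains(...) check followed by transferFieldIds.add(...).
    static int[] firstOccurrences(int[] fieldIds)
    {
      IntOpenHashSet seen = new IntOpenHashSet();
      IntArrayList   kept = new IntArrayList();

      for (int fieldId : fieldIds)
      {
        if (!seen.contains(fieldId))   // already handled? then skip
        {
          seen.add(fieldId);
          kept.add(fieldId);
        }
      }
      return kept.toArray();
    }
  }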

  {
    int i, size = size();
     
    StringIntPair[]   H = new StringIntPair[size];
    List<DEPCountArc> F = new ArrayList<DEPCountArc>();
    IntOpenHashSet    T = new IntOpenHashSet();
    DEPCountArc a;
   
    StringIntPair[] t = lHeads.get(0);
     
    for (i=1; i<size; i++)
      H[i] = new StringIntPair(t[i].s, t[i].i);
     
    T.add(DEPLib.ROOT_ID);
    F.addAll(getArcs(lHeads, T));
     
    while (!F.isEmpty())
    {
      UTCollection.sortReverseOrder(F);
      a = F.get(0);
     
      H[a.depId].i = a.headId;
      H[a.depId].s = a.deprel;
   
      T.add(a.depId);
      removeArcs(F, a.depId);
     
      F.addAll(getArcs(lHeads, T));     
    }
     
View Full Code Here

  }
 
  /** Called by {@link DEPTree#projectivize()}. */
  private DEPNode getSmallestNonProjectiveArc(IntArrayList ids)
  {
    IntOpenHashSet remove = new IntOpenHashSet();
    DEPNode wk, nonProj = null;
    int np, max = 0;
   
    for (IntCursor cur : ids)
    {
      wk = get(cur.value);
      np = isNonProjective(wk);
     
      if (np == 0)
      {
        remove.add(cur.value);
      }
      else if (np > max)
      {
        nonProj = wk;
        max = np;
View Full Code Here

  }
 
  @Deprecated
  private void addNonProjectiveMap(IntObjectOpenHashMap<IntOpenHashSet> map, int cIdx, int nIdx)
  {
    IntOpenHashSet set;
   
    if (map.containsKey(cIdx))
      set = map.get(cIdx);
    else
    {
      set = new IntOpenHashSet();
      map.put(cIdx, set);
    }
   
    set.add(nIdx);
  }
View Full Code Here
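addNonProjectiveMap is the standard get-or-create idiom for an IntObjectOpenHashMap<IntOpenHashSet>: look the key up, lazily create the value set on first use, then add to it. A hedged standalone sketch of the same idiom, with an illustrative int[][] input in place of the parser's crossing-arc pairs:

  import com.carrotsearch.hppc.IntObjectOpenHashMap;
  import com.carrotsearch.hppc.IntOpenHashSet;

  class GroupBySketch
  {
    // Groups the second element of each (key, value) pair under its first element,
    // using the same containsKey()/put() pattern as addNonProjectiveMap above.
    static IntObjectOpenHashMap<IntOpenHashSet> group(int[][] pairs)
    {
      IntObjectOpenHashMap<IntOpenHashSet> map = new IntObjectOpenHashMap<IntOpenHashSet>();

      for (int[] pair : pairs)
      {
        IntOpenHashSet set;

        if (map.containsKey(pair[0]))
          set = map.get(pair[0]);
        else
        {
          set = new IntOpenHashSet();
          map.put(pair[0], set);
        }
        set.add(pair[1]);
      }
      return map;
    }
  }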

 
  @Deprecated
  private IntOpenHashSet getNonProjectiveMapAux(IntObjectOpenHashMap<IntOpenHashSet> map)
  {
    IntIntPair max = new IntIntPair(-1, -1);
    IntOpenHashSet set, remove;
    boolean removed;
    int[] keys;
   
    do
    {
      max.set(-1, -1);
      keys = map.keys().toArray();
      Arrays.sort(keys);
     
      for (int key : keys)
      {
        set = map.get(key);
       
        if (set.size() > max.i2)
          max.set(key, set.size());
      }
     
      removed = false;
     
      if (max.i2 > 0)
      {
        remove = new IntOpenHashSet();
       
        for (IntCursor cur : map.get(max.i1))
        {
          if (map.containsKey(cur.value))
          {
            set = map.get(cur.value);
           
            if (set.contains(max.i1))
            {
              removed = true;
              set.remove(max.i1);
              if (set.isEmpty())  remove.add(cur.value);
            }
          }
        }
       
        for (IntCursor cur : remove)
          map.remove(cur.value);
      }
    }
    while (removed);
           
    return new IntOpenHashSet(map.keys());
  }
View Full Code Here

    final List<NamedExpression> exprs = getExpressionList();
    final ErrorCollector collector = new ErrorCollectorImpl();
    final List<TransferPair> transfers = Lists.newArrayList();

    final ClassGenerator<Flattener> cg = CodeGenerator.getRoot(Flattener.TEMPLATE_DEFINITION, context.getFunctionRegistry());
    IntOpenHashSet transferFieldIds = new IntOpenHashSet();

    RepeatedVector flattenField = ((RepeatedVector) incoming.getValueAccessorById(
          incoming.getSchema().getColumn(
              incoming.getValueVectorId(
                  popConfig.getColumn()).getFieldIds()[0]).getValueClass(),
          incoming.getValueVectorId(popConfig.getColumn()).getFieldIds()).getValueVector());

    NamedExpression namedExpression = new NamedExpression(popConfig.getColumn(), new FieldReference(popConfig.getColumn()));
    LogicalExpression expr = ExpressionTreeMaterializer.materialize(namedExpression.getExpr(), incoming, collector, context.getFunctionRegistry(), true);
    ValueVectorReadExpression vectorRead = (ValueVectorReadExpression) expr;
    TypedFieldId id = vectorRead.getFieldId();
    Preconditions.checkNotNull(incoming);

    TransferPair tp = null;
    if (flattenField instanceof RepeatedMapVector) {
      tp = ((RepeatedMapVector)flattenField).getTransferPairToSingleMap();
    } else {
      ValueVector vvIn = flattenField.getAccessor().getAllChildValues();
      tp = vvIn.getTransferPair();
    }
    transfers.add(tp);
    container.add(tp.getTo());
    transferFieldIds.add(vectorRead.getFieldId().getFieldIds()[0]);

    logger.debug("Added transfer for project expression.");

    ClassifierResult result = new ClassifierResult();
View Full Code Here
