Package: com.browseengine.bobo.util

Examples of com.browseengine.bobo.util.BigSegmentedArray


  public void load(String fieldName, AtomicReader reader, TermListFactory<T> listFactory)
      throws IOException {
    String field = fieldName.intern();
    int maxDoc = reader.maxDoc();

    BigSegmentedArray order = this.orderArray;
    int negativeValueCount = 0;
    if (order == null) {
      int counts[] = getValueCounts(reader, field);
      order = newInstance(counts[0], maxDoc);
      negativeValueCount = counts[1];
    } else {
      // we want to reuse the memory
      negativeValueCount = getNegativeValueCount(reader, field);
      order.ensureCapacity(maxDoc); // no need to fill to 0, we are reseting the
                                    // data anyway
    }
    this.orderArray = order;

    IntArrayList minIDList = new IntArrayList();
    IntArrayList maxIDList = new IntArrayList();
    IntArrayList freqList = new IntArrayList();

    int length = maxDoc + 1;
    @SuppressWarnings("unchecked")
    TermValueList<T> list = listFactory == null ? (TermValueList<T>) new TermStringList()
        : listFactory.createTermList();

    int t = 0; // current term number
    list.add(null);
    minIDList.add(-1);
    maxIDList.add(-1);
    freqList.add(0);
    int totalFreq = 0;
    t++;
    Terms terms = reader.terms(field);
    if (terms != null) {
      TermsEnum termsEnum = terms.iterator(null);
      BytesRef text;
      while ((text = termsEnum.next()) != null) {
        // store term text
        // we expect that there is at most one term per document
        if (t >= length) throw new RuntimeException("there are more terms than "
            + "documents in field \"" + field + "\", but it's impossible to sort on "
            + "tokenized fields");
        String strText = text.utf8ToString();
        list.add(strText);
        Term term = new Term(field, strText);
        DocsEnum docsEnum = reader.termDocsEnum(term);
        // freqList.add(termEnum.docFreq()); // doesn't take into account
        // deldocs
        int minID = -1;
        int maxID = -1;
        int docID = -1;
        int df = 0;
        int valId = (t - 1 < negativeValueCount) ? (negativeValueCount - t + 1) : t;
        while ((docID = docsEnum.nextDoc()) != DocsEnum.NO_MORE_DOCS) {
          df++;
          order.add(docID, valId);
          minID = docID;
          while (docsEnum.nextDoc() != DocsEnum.NO_MORE_DOCS) {
            docID = docsEnum.docID();
            df++;
            order.add(docID, valId);
          }
          maxID = docID;
        }
        freqList.add(df);
        totalFreq += df;
        minIDList.add(minID);
        maxIDList.add(maxID);
        t++;
      }
    }

    list.seal();
    this.valArray = list;
    this.freqs = freqList.toIntArray();
    this.minIDs = minIDList.toIntArray();
    this.maxIDs = maxIDList.toIntArray();

    int doc = 0;
    while (doc <= maxDoc && order.get(doc) != 0) {
      ++doc;
    }
    if (doc <= maxDoc) {
      this.minIDs[0] = doc;
      // Try to get the max
      doc = maxDoc;
      while (doc > 0 && order.get(doc) != 0) {
        --doc;
      }
      if (doc > 0) {
        this.maxIDs[0] = doc;
      }
View Full Code Here


    public DocComparator getComparator(AtomicReader reader, int docbase) throws IOException {
      if (!(reader instanceof BoboSegmentReader)) throw new IllegalStateException(
          "reader not instance of " + BoboSegmentReader.class);
      BoboSegmentReader boboReader = (BoboSegmentReader) reader;
      final FacetDataCache<?> dataCache = _facetHandler.getFacetData(boboReader);
      final BigSegmentedArray orderArray = dataCache.orderArray;
      return new DocComparator() {
        @Override
        public Comparable<?> value(ScoreDoc doc) {
          int index = orderArray.get(doc.doc);
          return dataCache.valArray.getComparableValue(index);
        }

        @Override
        public int compare(ScoreDoc doc1, ScoreDoc doc2) {
          return orderArray.get(doc1.doc) - orderArray.get(doc2.doc);
        }
      };
    }
View Full Code Here

      Object dataObj = boboReader.getFacetData(_timeFacetName);
      if (dataObj instanceof FacetDataCache<?>) {
        @SuppressWarnings("unchecked")
        FacetDataCache<Long> facetDataCache = (FacetDataCache<Long>) (boboReader
            .getFacetData(_timeFacetName));
        final BigSegmentedArray orderArray = facetDataCache.orderArray;
        final TermLongList termList = (TermLongList) facetDataCache.valArray;
        Explanation finalExpl = new Explanation();
        finalExpl.addDetail(innerExplaination);
        float rawScore = innerExplaination.getValue();
        long timeVal = termList.getPrimitiveValue(orderArray.get(doc));
        float timeScore = computeTimeFactor(timeVal);
        float finalScore = combineScores(timeScore, rawScore);
        finalExpl.setValue(finalScore);
        finalExpl.setDescription("final score = (time score: " + timeScore + ") * (raw score: "
            + rawScore + "), timeVal: " + timeVal);
View Full Code Here

      Object dataObj = boboReader.getFacetData(_timeFacetName);
      if (dataObj instanceof FacetDataCache<?>) {
        @SuppressWarnings("unchecked")
        FacetDataCache<Long> facetDataCache = (FacetDataCache<Long>) (boboReader
            .getFacetData(_timeFacetName));
        final BigSegmentedArray orderArray = facetDataCache.orderArray;
        final TermLongList termList = (TermLongList) facetDataCache.valArray;
        return new Scorer(innerScorer.getWeight()) {

          @Override
          public float score() throws IOException {
            float rawScore = innerScorer.score();
            long timeVal = termList.getRawValue(orderArray.get(innerScorer.docID()));
            float timeScore = computeTimeFactor(timeVal);
            return combineScores(timeScore, rawScore);
          }

          @Override
View Full Code Here

  public void load(String fieldName, IndexReader reader, TermListFactory<T> listFactory) throws IOException {
    String field = fieldName.intern();
    int maxDoc = reader.maxDoc();

    BigSegmentedArray order = this.orderArray;
    if (order == null) // we want to reuse the memory
    {
      order = newInstance(_termCountSize, maxDoc);
    } else {
      order.ensureCapacity(maxDoc); // no need to fill to 0, we are reseting the
                                    // data anyway
    }
    this.orderArray = order;
   
    IntArrayList minIDList = new IntArrayList();
    IntArrayList maxIDList = new IntArrayList();
    IntArrayList freqList = new IntArrayList();

    int length = maxDoc + 1;
    TermValueList<T> list = listFactory == null ? (TermValueList<T>) new TermStringList() : listFactory
        .createTermList();
    int negativeValueCount = getNegativeValueCount(reader, field);
   
    TermDocs termDocs = reader.termDocs();
    TermEnum termEnum = reader.terms(new Term(field, ""));
    int t = 0; // current term number

    list.add(null);
    minIDList.add(-1);
    maxIDList.add(-1);
    freqList.add(0);
    int totalFreq = 0;   
    // int df = 0;
    t++;
    try {
      do {
        Term term = termEnum.term();
        if (term == null || term.field() != field)
          break;

        if (t > order.maxValue()) {
          throw new IOException("maximum number of value cannot exceed: " + order.maxValue());
        }
        // store term text
        // we expect that there is at most one term per document
        if (t >= length)
          throw new RuntimeException("there are more terms than " + "documents in field \"" + field
              + "\", but it's impossible to sort on " + "tokenized fields");
        list.add(term.text());
        termDocs.seek(termEnum);
        // freqList.add(termEnum.docFreq()); // doesn't take into account
        // deldocs
        int minID = -1;
        int maxID = -1;
        int df = 0;
        int valId = (t - 1 < negativeValueCount) ? (negativeValueCount - t + 1) : t;
        if (termDocs.next()) {
          df++;
          int docid = termDocs.doc();
          order.add(docid, valId);
          minID = docid;
          while (termDocs.next()) {
            df++;
            docid = termDocs.doc();
            order.add(docid, valId);
          }
          maxID = docid;
        }
        freqList.add(df);
        totalFreq += df;
        minIDList.add(minID);
        maxIDList.add(maxID);

        t++;
      } while (termEnum.next());
    } finally {
      termDocs.close();
      termEnum.close();
    }
    list.seal();
    this.valArray = list;
    this.freqs = freqList.toIntArray();
    this.minIDs = minIDList.toIntArray();
    this.maxIDs = maxIDList.toIntArray();

    int doc = 0;
    while (doc <= maxDoc && order.get(doc) != 0) {
      ++doc;
    }
    if (doc <= maxDoc) {
      this.minIDs[0] = doc;
      // Try to get the max
      doc = maxDoc;
      while (doc > 0 && order.get(doc) != 0) {
        --doc;
      }
      if (doc > 0) {
        this.maxIDs[0] = doc;
      }
View Full Code Here

    public DocComparator getComparator(IndexReader reader, int docbase) throws IOException {
      if (!(reader instanceof BoboIndexReader))
        throw new IllegalStateException("reader not instance of " + BoboIndexReader.class);
      BoboIndexReader boboReader = (BoboIndexReader) reader;
      final FacetDataCache dataCache = _facetHandler.getFacetData(boboReader);
      final BigSegmentedArray orderArray = dataCache.orderArray;
      return new DocComparator() {
       
        @Override
        public Comparable value(ScoreDoc doc) {
          int index = orderArray.get(doc.doc);
          return dataCache.valArray.getComparableValue(index);         
        }

        @Override
        public int compare(ScoreDoc doc1, ScoreDoc doc2) {
          return orderArray.get(doc1.doc) - orderArray.get(doc2.doc);
        }
      };
    }
View Full Code Here

  /**
   * gets distribution of the value arrays. When predefined ranges are available, this returns distribution by predefined ranges.
   */
  @Override
  public BigSegmentedArray getCountDistribution() {
    BigSegmentedArray dist;
    if (_predefinedRangeIndexes != null) {
      dist = new LazyBigIntArray(_predefinedRangeIndexes.length);
      int n = 0;
      for (int[] range : _predefinedRangeIndexes) {
        int start = range[0];
        int end = range[1];

        int sum = 0;
        for (int i = start; i < end; ++i) {
          sum += _count.get(i);
        }
        dist.add(n++, sum);
      }
    } else {
      dist = _count;
    }

View Full Code Here

      if (_predefinedRangeIndexes != null) {
        int minCount = _ospec.getMinHitCount();
        int maxNumOfFacets = _ospec.getMaxCount();
        if (maxNumOfFacets <= 0 || maxNumOfFacets > _predefinedRangeIndexes.length) maxNumOfFacets = _predefinedRangeIndexes.length;

        BigSegmentedArray rangeCount = new LazyBigIntArray(_predefinedRangeIndexes.length);

        for (int k = 0; k < _predefinedRangeIndexes.length; ++k) {
          int count = 0;
          int idx = _predefinedRangeIndexes[k][0];
          int end = _predefinedRangeIndexes[k][1];
          while (idx <= end) {
            count += _count.get(idx++);
          }
          rangeCount.add(k, count);
        }

        List<BrowseFacet> facetColl;
        FacetSortSpec sortspec = _ospec.getOrderBy();
        if (sortspec == FacetSortSpec.OrderValueAsc) {
          facetColl = new ArrayList<BrowseFacet>(maxNumOfFacets);
          for (int k = 0; k < _predefinedRangeIndexes.length; ++k) {
            if (rangeCount.get(k) >= minCount) {
              BrowseFacet choice = new BrowseFacet(_predefinedRanges.get(k), rangeCount.get(k));
              facetColl.add(choice);
            }
            if (facetColl.size() >= maxNumOfFacets) break;
          }
        } else // if (sortspec == FacetSortSpec.OrderHitsDesc)
        {
          ComparatorFactory comparatorFactory;
          if (sortspec == FacetSortSpec.OrderHitsDesc) {
            comparatorFactory = new FacetHitcountComparatorFactory();
          } else {
            comparatorFactory = _ospec.getCustomComparatorFactory();
          }

          if (comparatorFactory == null) {
            throw new IllegalArgumentException("facet comparator factory not specified");
          }

          final IntComparator comparator = comparatorFactory.newComparator(
            new FieldValueAccessor() {
              @Override
              public String getFormatedValue(int index) {
                return _predefinedRanges.get(index);
              }

              @Override
              public Object getRawValue(int index) {
                return _predefinedRanges.getRawValue(index);
              }
            }, rangeCount);

          final int forbidden = -1;
          IntBoundedPriorityQueue pq = new IntBoundedPriorityQueue(comparator, maxNumOfFacets,
              forbidden);
          for (int i = 0; i < _predefinedRangeIndexes.length; ++i) {
            if (rangeCount.get(i) >= minCount) pq.offer(i);
          }

          int val;
          facetColl = new LinkedList<BrowseFacet>();
          while ((val = pq.pollInt()) != forbidden) {
            BrowseFacet facet = new BrowseFacet(_predefinedRanges.get(val), rangeCount.get(val));
            ((LinkedList<BrowseFacet>) facetColl).addFirst(facet);
          }
        }
        return facetColl;
      } else {
View Full Code Here

  }

  @Override
  public FacetIterator iterator() {
    if (_predefinedRanges != null) {
      BigSegmentedArray rangeCounts = new LazyBigIntArray(_predefinedRangeIndexes.length);
      for (int k = 0; k < _predefinedRangeIndexes.length; ++k) {
        int count = 0;
        int idx = _predefinedRangeIndexes[k][0];
        int end = _predefinedRangeIndexes[k][1];
        while (idx <= end) {
          count += _count.get(idx++);
        }
        rangeCounts.add(k, rangeCounts.get(k) + count);
      }
      return new DefaultFacetIterator(_predefinedRanges, rangeCounts, rangeCounts.size(), true);
    }
    return null;
  }
View Full Code Here

TOP

Related Classes of com.browseengine.bobo.util.BigSegmentedArray

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and owned by Oracle Inc.