// Builds the facet data for `field`: the ordered term value list, per-term doc
// frequency and min/max doc id arrays, and a doc -> value-id mapping loaded into
// _nestedArray via a BufferedLoader.
// NOTE(review): this is the interior of a load(...) method; the enclosing
// signature and the tail of the final while-loop are outside this chunk.
BufferedLoader loader = getBufferedLoader(maxdoc, workArea);
@SuppressWarnings("unchecked")
TermValueList<T> list = (listFactory == null ? (TermValueList<T>) new TermStringList()
: listFactory.createTermList());
IntArrayList minIDList = new IntArrayList();
IntArrayList maxIDList = new IntArrayList();
IntArrayList freqList = new IntArrayList();
// One bit per document id in [0, maxdoc]; set when the doc has at least one value.
OpenBitSet bitset = new OpenBitSet(maxdoc + 1);
int negativeValueCount = getNegativeValueCount(reader, field);
int t = 0; // current term number
// Slot 0 is a dummy entry (null term, freq 0, ids -1) so that value id 0 can
// represent "no value"; real terms start at t == 1.
list.add(null);
minIDList.add(-1);
maxIDList.add(-1);
freqList.add(0);
t++;
_overflow = false;
Terms terms = reader.terms(field);
if (terms != null) {
// Iterate all terms of the field in index (sorted) order.
TermsEnum termsEnum = terms.iterator(null);
BytesRef text;
while ((text = termsEnum.next()) != null) {
String strText = text.utf8ToString();
list.add(strText);
Term term = new Term(field, strText);
// NOTE(review): in Lucene 4.x termDocsEnum(term) may return null when the term
// has no postings for this reader — confirm that cannot happen for a term just
// returned by termsEnum, otherwise the nextDoc() call below can NPE.
DocsEnum docsEnum = reader.termDocsEnum(term);
// freqList.add(tenum.docFreq()); // removed because the df doesn't take into account
// the num of deletedDocs
// df is recounted by walking the postings so deleted docs are excluded.
int df = 0;
int minID = -1;
int maxID = -1;
int docID = -1;
// Remap the ordinals of the first `negativeValueCount` terms in reverse,
// presumably so negative values keep increasing-value id order — see
// getNegativeValueCount; TODO confirm against its implementation.
int valId = (t - 1 < negativeValueCount) ? (negativeValueCount - t + 1) : t;
while ((docID = docsEnum.nextDoc()) != DocsEnum.NO_MORE_DOCS) {
df++;
// loader.add returns false on overflow of the buffered mapping.
if (!loader.add(docID, valId)) logOverflow(fieldName);
minID = docID; // first (smallest) doc id posting for this term
bitset.fastSet(docID);
// Drain the remaining postings here; the outer while condition then sees
// NO_MORE_DOCS, so this outer loop body executes at most once per term.
while (docsEnum.nextDoc() != DocsEnum.NO_MORE_DOCS) {
docID = docsEnum.docID();
df++;
if (!loader.add(docID, valId)) logOverflow(fieldName);
bitset.fastSet(docID);
}
maxID = docID; // last (largest) doc id posting for this term
}
freqList.add(df);
minIDList.add(minID);
maxIDList.add(maxID);
t++;
}
}
// Freeze the term list before exposing it.
list.seal();
try {
_nestedArray.load(maxdoc + 1, loader);
} catch (IOException e) {
throw e; // propagate I/O failures unchanged
} catch (Exception e) {
// Wrap unexpected failures, preserving the cause.
throw new RuntimeException("failed to load due to " + e.toString(), e);
}
this.valArray = list;
this.freqs = freqList.toIntArray();
this.minIDs = minIDList.toIntArray();
this.maxIDs = maxIDList.toIntArray();
// Scan forward for the first document that has at least one value
// (loop body continues beyond this chunk).
int doc = 0;
while (doc <= maxdoc && !_nestedArray.contains(doc, 0, true)) {