Examples of tokenStream()


Examples of org.apache.lucene.analysis.Analyzer.tokenStream()

          app
                .showStatus("Couldn't instantiate analyzer - public 0-arg or 1-arg constructor(Version) required");
          return;
        }
      }
      TokenStream ts = analyzer.tokenStream("text", new StringReader(app
              .getString(inputText, "text")));
      app.removeAll(resultsList);
      ts.reset();

      while (ts.incrementToken()) {
View Full Code Here

Examples of org.apache.lucene.analysis.Analyzer.tokenStream()

              FieldType ft = schema.getFieldType(name);
              Analyzer analyzer = ft.getAnalyzer();
              Collection<Object> vals = doc.getFieldValues(name);
              for (Object val : vals) {
                Reader reader = new StringReader(val.toString());
                TokenStream tstream = analyzer.tokenStream(name, reader);
                NamedList<NamedList<Object>> tokens = getTokens(tstream);
                theTokens.add(name, tokens);
              }
            }
          }
View Full Code Here

Examples of org.apache.lucene.analysis.Analyzer.tokenStream()

      return queryAnalyzer.tokenStream(fieldName, reader);
    }
   
    Analyzer a = map.get(fieldName);
    if (a != null) {
      return a.tokenStream(fieldName, reader);
    }

    FieldType ft = parser.getReq().getSchema().getFieldType(fieldName);
    Analyzer qa = ft.getQueryAnalyzer();
    if (!(qa instanceof TokenizerChain)) {
View Full Code Here

Examples of org.apache.lucene.analysis.Analyzer.tokenStream()

    FieldType ft = parser.getReq().getSchema().getFieldType(fieldName);
    Analyzer qa = ft.getQueryAnalyzer();
    if (!(qa instanceof TokenizerChain)) {
      map.put(fieldName, qa);
      return qa.tokenStream(fieldName, reader);
    }
    TokenizerChain tcq = (TokenizerChain)qa;
    Analyzer ia = ft.getAnalyzer();
    if (ia == qa || !(ia instanceof TokenizerChain)) {
      map.put(fieldName, qa);
View Full Code Here

Examples of org.apache.lucene.analysis.Analyzer.tokenStream()

    String s = "Ab94aa4CdDbd34dfde082ed1b4c4d0c505b69";

    StringReader sr = new StringReader(s);
//    Analyzer analyzer =new StandardAnalyzer((Version) Enum.valueOf((Class) Class.forName("org.apache.lucene.util.Version"),  Version.LUCENE_35.name()));
    Analyzer analyzer = schema.getAnalyzer();//JobIndexPublic.setAnalyzer(conf);
    TokenStream tk=analyzer.tokenStream("rawquery", sr);

    boolean hasnext = tk.incrementToken();

    while(hasnext){
View Full Code Here

Examples of org.apache.lucene.analysis.Analyzer.tokenStream()

        }
      }
    }
    try {
      Analyzer analyzer = sp.getType().getQueryAnalyzer();
      TokenStream ts = analyzer.tokenStream(field,
              new StringReader(qstr));
      while (ts.incrementToken()) {//<co id="qqp.addTerms"/>
        String term = ((CharTermAttribute)
                ts.getAttribute(CharTermAttribute.class)).toString();
        sql.add(new SpanTermQuery(new Term(field, term)));
View Full Code Here

Examples of org.apache.lucene.analysis.Analyzer.tokenStream()

      int positionCount = 0;
      boolean severalTokensAtSamePosition = false;

      final TokenStream source;
      try {
        source = analyzer.tokenStream(field, new StringReader(text));
        source.reset();
      } catch (final IOException e1) {
        throw new RuntimeException(e1);
      }
      final CachingTokenFilter buffer = new CachingTokenFilter(source);
View Full Code Here

Examples of org.apache.lucene.analysis.Analyzer.tokenStream()

        if (reusableCharArray == null) {
          reusableCharArray = new ReusableCharArrayReader(termAtt.buffer(), 0, termAtt.length());
        } else {
          reusableCharArray.reset(termAtt.buffer(), 0, termAtt.length());
        }
        currentStream = analyzer.tokenStream("", reusableCharArray);
        currentStream.reset(); // reset to prepare the stream for consumption
        this.initTokenAttributes();
      }
      // Consume the token with the registered analyzer
      isConsumingToken = currentStream.incrementToken();
View Full Code Here

Examples of org.apache.lucene.analysis.Analyzer.tokenStream()

        String s = "football-soccer in the euro 2004 footie competition";
        QueryParser parser = new QueryParser("bookid", analyzer);
        Query query = parser.parse(srchkey);

        TokenStream tokenStream = analyzer.tokenStream(null, new StringReader(s));

        Highlighter highlighter = getHighlighter(query, null, tokenStream, HighlighterTest.this);

        // Get 3 best fragments and separate with a "..."
        tokenStream = analyzer.tokenStream(null, new StringReader(s));
View Full Code Here

Examples of org.apache.lucene.analysis.Analyzer.tokenStream()

        TokenStream tokenStream = analyzer.tokenStream(null, new StringReader(s));

        Highlighter highlighter = getHighlighter(query, null, tokenStream, HighlighterTest.this);

        // Get 3 best fragments and separate with a "..."
        tokenStream = analyzer.tokenStream(null, new StringReader(s));

        String result = highlighter.getBestFragments(tokenStream, s, 3, "...");
        String expectedResult = "<B>football</B>-<B>soccer</B> in the euro 2004 <B>footie</B> competition";
        assertTrue("overlapping analyzer should handle highlights OK, expected:" + expectedResult
            + " actual:" + result, expectedResult.equals(result));
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., which is owned by Oracle Inc. Contact coftware#gmail.com.