Package org.apache.lucene.analysis

Examples of org.apache.lucene.analysis.Token.startOffset()
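
Token.startOffset() returns the index of the first character of the token in the source text, and Token.endOffset() returns the index one past the last character. The snippets below all follow the same producer-side pattern: read these offsets from a pre-built Token and copy them into the stream's OffsetAttribute with setOffset(). For context, here is a minimal read-side sketch (not taken from any of the projects below; the field name and analyzer are placeholders, and it assumes the Lucene 2.9/3.x attribute API plus imports of Analyzer, TokenStream, TermAttribute, OffsetAttribute, and java.io.StringReader):

      // Tokenize some text and print each token with the offsets the producer set via setOffset(...).
      public static void printOffsets(Analyzer analyzer, String text) throws IOException {
        TokenStream ts = analyzer.tokenStream("content", new StringReader(text));
        TermAttribute termAtt = ts.addAttribute(TermAttribute.class);
        OffsetAttribute offsetAtt = ts.addAttribute(OffsetAttribute.class);
        while (ts.incrementToken()) {
          // startOffset/endOffset are character positions into the original text
          System.out.println(termAtt.term() + " [" + offsetAtt.startOffset()
              + "," + offsetAtt.endOffset() + "]");
        }
        ts.close();
      }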


      // Replay buffered Tokens one at a time, copying term text, position increment, and offsets into the stream's attributes.
      public boolean incrementToken() throws IOException {
        if(iter.hasNext()) {
          Token token = (Token) iter.next();
          termAtt.setTermBuffer(token.term());
          posIncrAtt.setPositionIncrement(token.getPositionIncrement());
          offsetAtt.setOffset(token.startOffset(), token.endOffset());
          return true;
        }
        return false;
      }
    };


      }
      // Copy every attribute of the prototype Token: term buffer, position increment, flags, offsets, type, and payload.
      Token prototype = (Token) iterator.next();
      termAtt.setTermBuffer(prototype.termBuffer(), 0, prototype.termLength());
      posIncrAtt.setPositionIncrement(prototype.getPositionIncrement());
      flagsAtt.setFlags(prototype.getFlags());
      offsetAtt.setOffset(prototype.startOffset(), prototype.endOffset());
      typeAtt.setType(prototype.type());
      payloadAtt.setPayload(prototype.getPayload());

      return true;
    }

    // Test helper: replay a fixed array of Tokens, copying each one's term buffer, offsets, and position increment.
    public final boolean incrementToken() throws IOException {
      clearAttributes();
      if (index < testToken.length) {
        Token t = testToken[index++];
        termAtt.setTermBuffer(t.termBuffer(), 0, t.termLength());
        offsetAtt.setOffset(t.startOffset(), t.endOffset());
        posIncrAtt.setPositionIncrement(t.getPositionIncrement());
        typeAtt.setType(TypeAttributeImpl.DEFAULT_TYPE);
        return true;
      } else {
        return false;
      }
    }

            // Replay the Tokens rebuilt from the term vector, copying each one's term and offsets.
            if (currentToken >= tokens.length) {
              return false;
            }
            Token token = tokens[currentToken++];
            termAtt.setTermBuffer(token.term());
            offsetAtt.setOffset(token.startOffset(), token.endOffset());
            return true;
          }
        }     
        //code to reconstruct the original sequence of Tokens
        String[] terms=tpv.getTerms();         

                {
                    // Order Tokens by their original start offset in the source text.
                    Token t1=(Token) o1;
                    Token t2=(Token) o2;
                    if(t1.startOffset()>t2.startOffset())
                        return 1;
                    if(t1.startOffset()<t2.startOffset())
                        return -1;
                    return 0;
                }});
        }
        return new StoredTokenStream(tokensInOriginalOrder);
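
The two snippets above come from code that rebuilds a token stream out of a stored term vector and then sorts the rebuilt Tokens by startOffset(). A hedged sketch of that rebuilding step (variable names such as tpv and unsortedTokens are placeholders; it assumes a TermPositionVector stored with offsets and an import of java.util.ArrayList):

            String[] terms = tpv.getTerms();
            ArrayList<Token> unsortedTokens = new ArrayList<Token>();
            for (int t = 0; t < terms.length; t++) {
              // one TermVectorOffsetInfo per occurrence of the term in the document
              TermVectorOffsetInfo[] offsets = tpv.getOffsets(t);
              if (offsets == null) {
                continue; // offsets were not stored for this term
              }
              for (int i = 0; i < offsets.length; i++) {
                // rebuild a Token carrying the stored character offsets
                unsortedTokens.add(new Token(terms[t],
                    offsets[i].getStartOffset(), offsets[i].getEndOffset()));
              }
            }
            // sorting unsortedTokens by Token.startOffset() restores the original order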

          termDocumentInformationFactory.payloads.add(null);
        }

        // When offsets are stored with the term vector, record them relative to the running field offset.
        if (eField_Tokens.getKey().isStoreOffsetWithTermVector()) {

          termDocumentInformationFactory.termOffsets.add(new TermVectorOffsetInfo(fieldSetting.offset + token.startOffset(), fieldSetting.offset + token.endOffset()));
          lastOffset = fieldSetting.offset + token.endOffset();
        }


      }


        return reusableToken;
      } else
        while ((token = this.input.next(reusableToken)) != null) {
          // Rewrite the term "phrase" to "phrase1" while preserving the token's original start/end offsets.
          if (token.term().equals("phrase")) {
            inPhrase = true;
            savedStart = token.startOffset();
            savedEnd = token.endOffset();
            token.setTermBuffer("phrase1");
            token.setStartOffset(savedStart);
            token.setEndOffset(savedEnd);
            return token;

        Token token = null;

        // Print each token's text, character offsets, and type using the legacy TokenStream.next() API.
        while ((token = ts.next()) != null) {
            System.out.println("ReTokenizeFile.reTokenize(File): " + token.termText() + " " +
                token.startOffset() + " " + token.endOffset() + " " + token.type());
        }

        return file.getAbsolutePath();
    }

            // Grow the per-term offset array by one slot for this token.
            {
               TermVectorOffsetInfo[] tmp = info;
               info = new TermVectorOffsetInfo[tmp.length + 1];
               System.arraycopy(tmp, 0, info, 0, tmp.length);
            }
            info[info.length - 1] = new TermVectorOffsetInfo(t.startOffset(), t.endOffset());
            termMap.put(termText, info);
         }
      }
      catch (IOException e)
      {
