Examples of CollectionTokenStream
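
CollectionTokenStream is a small test utility from the Apache UIMA Lucas (Lucene CAS Indexer) test code: it wraps a pre-built Collection of Lucene Token objects and replays them through the old (pre-Lucene-2.9) TokenStream.next(Token) API, so token filters can be exercised without a real analyzer. A minimal sketch of what such a utility might look like, assuming the next(Token) contract used in the excerpts below (an illustration, not the project's actual source):

    import java.io.IOException;
    import java.util.Collection;
    import java.util.Iterator;

    import org.apache.lucene.analysis.Token;
    import org.apache.lucene.analysis.TokenStream;

    // Sketch: replay a fixed collection of tokens as a TokenStream.
    public class CollectionTokenStream extends TokenStream {

      private final Iterator<Token> iterator;

      public CollectionTokenStream(Collection<Token> tokens) {
        this.iterator = tokens.iterator();
      }

      public Token next(Token reusableToken) throws IOException {
        // Return the next pre-built token, or null once the collection is exhausted.
        return iterator.hasNext() ? iterator.next() : null;
      }
    }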


Examples of org.apache.uima.lucas.indexer.test.util.CollectionTokenStream

    tokens.add(newToken("token1", 0, 6));
    tokens.add(newToken("token2", 6, 11));
    tokens.add(newToken("token3", 11, 17));
    tokens.add(newToken("token4", 17, 23));
   
    CollectionTokenStream tokenStream = new CollectionTokenStream(tokens);
    Map<String, List<String>> hypernyms = new HashMap<String, List<String>>();
    List<String> tokenHypernyms = new ArrayList<String>();
    tokenHypernyms.add("token21");
    tokenHypernyms.add("token22");
    tokenHypernyms.add("token23");

Examples of org.apache.uima.lucas.indexer.test.util.CollectionTokenStream

  public void testNext() throws Exception{
    Collection<Token> tokens = new ArrayList<Token>();
    tokens.add(newToken("token1 token2 token3", 0, 6));
    tokens.add(newToken("token4 token5 token6", 7, 13));
   
    TokenStream tokenStream = new CollectionTokenStream(tokens);
    TokenFilter filter = new SplitterFilter(tokenStream, " ");
   
    Token nextToken = new Token();
    filter.next(nextToken);
    assertNotNull(nextToken);
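
The excerpt above only checks the first token; the same pre-2.9 consumption pattern applies to every filter on this page: next(Token) is called until it returns null. A hedged sketch of draining such a stream and inspecting each token (variable names are illustrative, not from the original test):

    // Drain the filter, printing each term together with its character offsets.
    Token token = new Token();
    while ((token = filter.next(token)) != null) {
      String term = new String(token.termBuffer(), 0, token.termLength());
      System.out.println(term + " [" + token.startOffset() + ", " + token.endOffset() + ")");
    }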

Examples of org.apache.uima.lucas.indexer.test.util.CollectionTokenStream

    tokens.add(newToken("token1", 0, 6));
    tokens.add(newToken("token2", 7, 13));
    tokens.add(newToken("token3", 14, 20));
    tokens.add(newToken("token4", 21, 27));
   
    TokenStream tokenStream = new CollectionTokenStream(tokens);
    ReplaceFilter filter = new ReplaceFilter(tokenStream, mapping);
   
    Token next = new Token();
    filter.next(next);
    assertEquals("replacement1", new String(next.termBuffer(), 0, next.termLength()));

Examples of org.apache.uima.lucas.indexer.test.util.CollectionTokenStream

    tokens.add(newToken("token1", 0, 6));
    tokens.add(newToken("token2", 7, 13));
    tokens.add(newToken("token3", 14, 20));
    tokens.add(newToken("token4", 21, 27));
   
    TokenStream tokenStream = new CollectionTokenStream(tokens);
    AdditionFilter filter = new AdditionFilter(tokenStream, "prefix_", AdditionFilter.PREFIX);
   
    Token nextToken = new Token();
    filter.next(nextToken);
    assertEquals("prefix_token1", new String(nextToken.termBuffer(), 0, nextToken.termLength()));
    nextToken = filter.next(nextToken);
    assertEquals("prefix_token2", new String(nextToken.termBuffer(), 0, nextToken.termLength()));
    nextToken = filter.next(nextToken);
    assertEquals("prefix_token3", new String(nextToken.termBuffer(), 0, nextToken.termLength()));
    nextToken = filter.next(nextToken);
    assertEquals("prefix_token4", new String(nextToken.termBuffer(), 0, nextToken.termLength()));   

    tokens = new ArrayList<Token>();
    tokens.add(newToken("token1", 0, 6));
    tokens.add(newToken("token2", 7, 13));
    tokens.add(newToken("token3", 14, 20));
    tokens.add(newToken("token4", 21, 27));
   
    tokenStream = new CollectionTokenStream(tokens);
    filter = new AdditionFilter(tokenStream, "_postfix", AdditionFilter.POSTFIX);
   
    nextToken = filter.next(nextToken);
    assertEquals("token1_postfix", new String(nextToken.termBuffer(), 0, nextToken.termLength()));
    nextToken = filter.next(nextToken);
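
The repeated termBuffer()/termLength() conversion in this test could be folded into a small assertion helper. The following is a hypothetical convenience method, not part of the original test class:

    // Hypothetical JUnit helper: advance the stream one token and compare its term text.
    private void assertNextTerm(TokenStream stream, String expectedTerm) throws IOException {
      Token token = stream.next(new Token());
      assertNotNull(token);
      assertEquals(expectedTerm, new String(token.termBuffer(), 0, token.termLength()));
    }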

Examples of org.apache.uima.lucas.indexer.test.util.CollectionTokenStream

    Collection<Token> tokens = new ArrayList<Token>();
    tokens.add(new Token("token1".toCharArray(),0,6,0,6));
    tokens.add(new Token("token2".toCharArray(),0,6,7,13));
    tokens.add(new Token("token3".toCharArray(),0,6,14,20));

    tokenStream = new CollectionTokenStream(tokens);
  }

Examples of org.apache.uima.lucas.indexer.test.util.CollectionTokenStream

    tokens.add(newToken("token2", 14, 20));
    tokens.add(newToken("token2", 21, 27));
    tokens.add(newToken("token3", 28, 33));
    tokens.add(newToken("token4", 34, 40));
   
    TokenStream tokenStream = new CollectionTokenStream(tokens);
    TokenFilter filter = new PositionFilter(tokenStream, PositionFilter.FIRST_POSITION);
   
    Token nextToken = new Token();
    filter.next(nextToken);
    assertNotNull(nextToken);

Examples of org.apache.uima.lucas.indexer.test.util.CollectionTokenStream

    List<Token> tokens = new ArrayList<Token>();
    tokens.add(newToken("token1", 0, 6));
    tokens.add(newToken("token2", 7, 13));
    tokens.add(newToken("token3", 14, 20));
   
    TokenStream tokenStream = new CollectionTokenStream(tokens);
    tokenStreams.add(tokenStream);

    tokens = new ArrayList<Token>();
    tokens.add(newToken("token4", 21, 27));
    tokens.add(newToken("token5", 28, 33));
    tokens.add(newToken("token6", 34, 40));

    tokenStream = new CollectionTokenStream(tokens);
    tokenStreams.add(tokenStream);
   
    TokenStreamConcatenator concatenator = new TokenStreamConcatenator(tokenStreams);
   
    Token nextToken = new Token();

Examples of org.apache.uima.lucas.indexer.test.util.CollectionTokenStream

    List<Token> tokens = new ArrayList<Token>();
    tokens.add(newToken("token1", 0, 6));
    tokens.add(newToken("token2", 7, 13));
    tokens.add(newToken("token3", 14, 20));
   
    TokenStream tokenStream = new CollectionTokenStream(tokens);
   
    UpperCaseFilter upperCaseTokenFilter = new UpperCaseFilter(tokenStream);
   
    Token next = new Token();
    upperCaseTokenFilter.next(next);

Examples of org.apache.uima.lucas.indexer.test.util.CollectionTokenStream

    Collection<Token> tokens2 = new ArrayList<Token>();
    tokens2.add(new Token("token4".toCharArray(),0,6,0,6));
    tokens2.add(new Token("token5".toCharArray(),0,6,7,13));
    tokens2.add(new Token("token6".toCharArray(),0,6,14,20));

    tokenStream1 = new CollectionTokenStream(tokens1);
    tokenStream2 = new CollectionTokenStream(tokens2);

    tokenStreams = Lists.newArrayList(tokenStream1, tokenStream2);
   
    fieldDescription = new FieldDescription("field1");
    fieldDescription.getAnnotationDescriptions().add(annotationDescription1);

Examples of org.apache.uima.lucas.indexer.test.util.CollectionTokenStream

    Collection<Token> tokens1 = new ArrayList<Token>();
    tokens1.add(new Token("token1".toCharArray(),0,6,0,6));
    tokens1.add(new Token("token2".toCharArray(),0,6,7,13));
    tokens1.add(new Token("token3".toCharArray(),0,6,14,20));

    CollectionTokenStream tokenStream1 = new CollectionTokenStream(tokens1);
    String concatenatedString = tokenStreamStringConcatenator.tokenStreamToStringWithDelimiter(tokenStream1, " ");
    assertEquals("token1 token2 token3", concatenatedString);
   
  }