Examples of addAttribute()


Examples of org.apache.lucene.analysis.standard.StandardTokenizer.addAttribute()

    TokenStream result = new StandardTokenizer(Version.LUCENE_CURRENT, reader);
    result = new StandardFilter(result);
    result = new LowerCaseFilter(result);
    result = new StopFilter(true, result, StandardAnalyzer.STOP_WORDS_SET);
   
    TermAttribute termAtt = (TermAttribute) result.addAttribute(TermAttribute.class);
    StringBuilder buf = new StringBuilder();
    try {
      while (result.incrementToken()) {
        if (termAtt.termLength() < 3) continue;
        String word = new String(termAtt.termBuffer(), 0, termAtt.termLength());

Examples of org.apache.lucene.analysis.wikipedia.WikipediaTokenizer.addAttribute()

   
    // now check the flags, TODO: add way to check flags from BaseTokenStreamTestCase?
    tf = new WikipediaTokenizer(new StringReader(test), WikipediaTokenizer.BOTH, untoks);
    int expectedFlags[] = new int[] { UNTOKENIZED_TOKEN_FLAG, 0, 0, 0, 0, UNTOKENIZED_TOKEN_FLAG, 0, 0, 0, 0,
        0, 0, 0, UNTOKENIZED_TOKEN_FLAG, 0, 0, 0, UNTOKENIZED_TOKEN_FLAG, 0, 0, UNTOKENIZED_TOKEN_FLAG, 0, 0, 0 };
    FlagsAttribute flagsAtt = tf.addAttribute(FlagsAttribute.class);
    tf.reset();
    for (int i = 0; i < expectedFlags.length; i++) {
      assertTrue(tf.incrementToken());
      assertEquals("flags " + i, expectedFlags[i], flagsAtt.getFlags());
    }

Examples of org.apache.lucene.queryParser.standard.config.StandardQueryConfigHandler.addAttribute()

  @Test
  public void testAttributes() {
    StandardQueryConfigHandler config = new StandardQueryConfigHandler();

    AnalyzerAttribute analyzerAttr = config
        .addAttribute(AnalyzerAttribute.class);

    Assert.assertTrue(null == analyzerAttr.getAnalyzer());
    Assert.assertTrue(null == config.get(ConfigurationKeys.ANALYZER));
    Analyzer analyzer = new KeywordAnalyzer();

Examples of org.apache.lucene.util.AttributeSource.addAttribute()

          ScoreTermQueue variantsQ = new ScoreTermQueue(MAX_VARIANTS_PER_TERM); //maxNum variants considered for any one term
          float minScore = 0;
          Term startTerm = new Term(f.fieldName, term);
          AttributeSource atts = new AttributeSource();
          MaxNonCompetitiveBoostAttribute maxBoostAtt =
            atts.addAttribute(MaxNonCompetitiveBoostAttribute.class);
          SlowFuzzyTermsEnum fe = new SlowFuzzyTermsEnum(terms, atts, startTerm, f.minSimilarity, f.prefixLength);
          //store the df so all variants use same idf
          int df = reader.docFreq(startTerm);
          int numVariants = 0;
          int totalVariantDocFreqs = 0;

Examples of org.apache.lucene.wikipedia.analysis.WikipediaTokenizer.addAttribute()

   * @throws IOException
   */
  static Set<String> getTokens(Article article) throws IOException {
    Set<String> tokenList = new HashSet<String>();
    WikipediaTokenizer tok = new WikipediaTokenizer(new StringReader(article.getText()));
    TermAttribute term = tok.addAttribute(TermAttribute.class);
    try {
      while (tok.incrementToken()) {
        String token = term.term();
        if (!StringUtils.isEmpty(token))
          tokenList.add(token);

Examples of org.apache.maven.doxia.sink.SinkEventAttributeSet.addAttribute()

        sink.head_();

        sink.body();

        SinkEventAttributeSet atts = new SinkEventAttributeSet();
        atts.addAttribute( SinkEventAttributes.TYPE, "text/javascript" );
        sink.unknown( "script", new Object[]{ HtmlMarkup.TAG_TYPE_START }, atts );
        sink.unknown( "cdata", new Object[]{ HtmlMarkup.CDATA_TYPE, javascriptToggleDisplayCode() }, null );
        sink.unknown( "script", new Object[]{ HtmlMarkup.TAG_TYPE_END }, null );

        sink.section1();

Examples of org.apache.maven.doxia.sink.SinkEventAttributes.addAttribute()

    }

    protected void sinkFigure( Sink sink, String image, String altText )
    {
        SinkEventAttributes attributes = new SinkEventAttributeSet();
        attributes.addAttribute( "alt", altText );
        attributes.addAttribute( "title", altText );

        sink.figure( null );

        sink.figureGraphics( image, attributes );

Examples of org.apache.neethi.Policy.addAttribute()

                            key = new QName(namespace, n.getLocalName());
   
                        } else {
                            key = new QName(namespace, n.getLocalName(), n.getPrefix());
                        }
                        policyOperator.addAttribute(key, n.getNodeValue());
                    }
                }           
            }
   
           

Examples of org.apache.ojb.broker.metadata.JdbcConnectionDescriptor.addAttribute()

        } else {
            platform = jcd.getDbms();
        }
       
        // special attributes
        jcd.addAttribute("org.apache.jetspeed.engineScoped",
                         Boolean.toString(jetspeedEngineScoped));
    }

    /**
     * @param jcd

Examples of org.apache.ojb.broker.metadata.SequenceDescriptor.addAttribute()

            // modify jcd copy
            jcd.setJcdAlias(jcdAlias);
            SequenceDescriptor sd = jcd.getSequenceDescriptor();
            assertNotNull("Can not find sequence-descriptor - check test", sd);
            // don't use autoNaming
            sd.addAttribute("autoNaming", "false");
            // add new connection descriptor to global base
            mm.connectionRepository().addDescriptor(jcd);

            // allow per thread changes of persistent object data
            mm.setEnablePerThreadChanges(true);
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., which is owned by Oracle Inc. Contact: coftware#gmail.com.