// Build the analysis chain under test: a PersianCharFilter feeding a StandardTokenizer.
PersianCharFilterFactory charfilterFactory = new PersianCharFilterFactory();
StandardTokenizerFactory tokenizerFactory = new StandardTokenizerFactory();
// Factories require a match version before init; TEST_VERSION_CURRENT pins the test
// to the current Lucene version (presumably defined in a test base class — not visible here).
tokenizerFactory.setLuceneMatchVersion(TEST_VERSION_CURRENT);
// No configuration parameters are needed for this test, but init(...) must still be called.
Map<String, String> args = Collections.emptyMap();
tokenizerFactory.init(args);
// Wrap the input reader (defined above this fragment) with the char filter, then tokenize.
TokenStream stream = tokenizerFactory.create(charfilterFactory.create(reader));
// Expect the input to split into two tokens; the PersianCharFilter presumably normalizes
// the zero-width non-joiner so "می" and "خورد" tokenize separately — TODO confirm input text.
assertTokenStreamContents(stream, new String[] { "می", "خورد" });
}
}