/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analysis;
import java.io.StringReader;
import java.util.HashMap;
import java.util.Map;
import org.apache.lucene.analysis.TokenStream;
/**
 * Simple test that {@link PatternTokenizerFactory} splits its input on the
 * configured regular expression.
 */
public class TestPatternTokenizerFactory extends BaseTokenTestCase
{
  public void testFactory() throws Exception {
    final String input = "Günther Günther is here";

    // Configure the factory with a pattern that splits on runs of
    // commas, semicolons, slashes, or whitespace.
    final Map<String,String> factoryArgs = new HashMap<String,String>();
    factoryArgs.put( PatternTokenizerFactory.PATTERN, "[,;/\\s]+" );

    final PatternTokenizerFactory factory = new PatternTokenizerFactory();
    factory.init( factoryArgs );

    // Tokenize the sample text and verify every emitted term, in order.
    final TokenStream tokenStream = factory.create( new StringReader(input) );
    assertTokenStreamContents( tokenStream,
        new String[] { "Günther", "Günther", "is", "here" } );
  }
}