package org.apache.lucene.analysis.icu;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.io.Reader;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.BaseTokenStreamTestCase;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
import com.ibm.icu.text.Normalizer2;
/**
* Tests the ICUNormalizer2Filter
*/
/**
 * Tests the ICUNormalizer2Filter: default NFKC_CaseFold behavior plus a
 * caller-supplied Normalizer2 instance.
 */
public class TestICUNormalizer2Filter extends BaseTokenStreamTestCase {

  /** Exercises the filter's default normalizer (NFKC + case folding). */
  public void testDefaults() throws IOException {
    final Analyzer analyzer = new Analyzer() {
      @Override
      public TokenStream tokenStream(String fieldName, Reader reader) {
        final WhitespaceTokenizer tokenizer =
            new WhitespaceTokenizer(TEST_VERSION_CURRENT, reader);
        return new ICUNormalizer2Filter(tokenizer);
      }
    };

    // plain ASCII lowercasing via case folding
    assertAnalyzesTo(analyzer, "This is a test", new String[] { "this", "is", "a", "test" });
    // German sharp s folds to "ss"
    assertAnalyzesTo(analyzer, "Ruß", new String[] { "russ" });
    // Greek: all-caps and title-case inputs fold to the same lowercase form
    assertAnalyzesTo(analyzer, "ΜΆΪΟΣ", new String[] { "μάϊοσ" });
    assertAnalyzesTo(analyzer, "Μάϊος", new String[] { "μάϊοσ" });
    // case folding of a supplementary-plane character
    assertAnalyzesTo(analyzer, "𐐖", new String[] { "𐐾" });
    // compatibility normalization of presentation forms
    assertAnalyzesTo(analyzer, "ﴳﴺﰧ", new String[] { "طمطمطم" });
    // default-ignorable code points are removed
    assertAnalyzesTo(analyzer, "क्ष", new String[] { "क्ष" });
  }

  /** Exercises the constructor that accepts an explicit Normalizer2. */
  public void testAlternate() throws IOException {
    final Analyzer analyzer = new Analyzer() {
      @Override
      public TokenStream tokenStream(String fieldName, Reader reader) {
        // requesting the "nfc" data in DECOMPOSE mode yields NFD
        final Normalizer2 nfd =
            Normalizer2.getInstance(null, "nfc", Normalizer2.Mode.DECOMPOSE);
        return new ICUNormalizer2Filter(
            new WhitespaceTokenizer(TEST_VERSION_CURRENT, reader), nfd);
      }
    };

    // U+00E9 (e-acute) decomposes to 'e' followed by U+0301 combining acute
    assertAnalyzesTo(analyzer, "\u00E9", new String[] { "\u0065\u0301" });
  }
}