package org.apache.lucene.index.codecs.appending;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Field.Index;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.Field.TermVector;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LogMergePolicy;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.index.SegmentWriteState;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.index.TermsEnum.SeekStatus;
import org.apache.lucene.index.codecs.Codec;
import org.apache.lucene.index.codecs.CodecProvider;
import org.apache.lucene.index.codecs.SegmentInfosReader;
import org.apache.lucene.index.codecs.SegmentInfosWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.Version;
public class TestAppendingCodec extends LuceneTestCase {
/**
 * CodecProvider that routes every codec lookup and all segment-infos
 * reading/writing to the appending (write-once) implementations.
 */
static class AppendingCodecProvider extends CodecProvider {
    private final Codec codec = new AppendingCodec();
    private final SegmentInfosWriter segmentInfosWriter = new AppendingSegmentInfosWriter();
    private final SegmentInfosReader segmentInfosReader = new AppendingSegmentInfosReader();

    /** Resolves to the appending codec regardless of {@code name}. */
    @Override
    public Codec lookup(String name) {
        return codec;
    }

    /** The appending codec handles all segment writes. */
    @Override
    public Codec getWriter(SegmentWriteState state) {
        return codec;
    }

    @Override
    public SegmentInfosReader getSegmentInfosReader() {
        return segmentInfosReader;
    }

    @Override
    public SegmentInfosWriter getSegmentInfosWriter() {
        return segmentInfosWriter;
    }
}
/**
 * IndexOutput decorator that forwards every operation to a delegate,
 * except {@link #seek(long)} which is rejected so that writes remain
 * strictly append-only.
 */
private static class AppendingIndexOutputWrapper extends IndexOutput {
    private final IndexOutput delegate;

    public AppendingIndexOutputWrapper(IndexOutput wrapped) {
        this.delegate = wrapped;
    }

    @Override
    public void close() throws IOException {
        delegate.close();
    }

    @Override
    public void flush() throws IOException {
        delegate.flush();
    }

    @Override
    public long getFilePointer() {
        return delegate.getFilePointer();
    }

    @Override
    public long length() throws IOException {
        return delegate.length();
    }

    /** Seeking would allow rewriting already-written bytes; always rejected. */
    @Override
    public void seek(long pos) throws IOException {
        throw new UnsupportedOperationException("seek() is unsupported");
    }

    @Override
    public void writeByte(byte b) throws IOException {
        delegate.writeByte(b);
    }

    @Override
    public void writeBytes(byte[] b, int offset, int length) throws IOException {
        delegate.writeBytes(b, offset, length);
    }
}
/**
 * Directory wrapper whose outputs refuse {@code seek()}, simulating an
 * append-only store on top of the wrapped directory.
 */
@SuppressWarnings("serial")
private static class AppendingRAMDirectory extends MockDirectoryWrapper {

    public AppendingRAMDirectory(Directory delegate) {
        super(delegate);
    }

    /** Wraps every created output so that seeking on it throws. */
    @Override
    public IndexOutput createOutput(String name) throws IOException {
        IndexOutput out = super.createOutput(name);
        return new AppendingIndexOutputWrapper(out);
    }
}
/** Corpus with a repeated term ("the" occurs twice) to exercise freq(). */
private static final String text = "the quick brown fox jumped over the lazy dog";

/**
 * Round-trips two documents through the appending codec: writes with a
 * commit in between (producing more than one segment before optimize),
 * then verifies stored fields, term seeking, and doc/freq enumeration.
 *
 * @throws Exception on any index read/write failure
 */
public void testCodec() throws Exception {
    Directory dir = new AppendingRAMDirectory(new RAMDirectory());
    IndexWriterConfig cfg = new IndexWriterConfig(Version.LUCENE_40, new MockAnalyzer());
    cfg.setCodecProvider(new AppendingCodecProvider());
    // Compound files are assembled by rewriting existing files, which the
    // append-only wrapper forbids, so disable them for segments and doc stores.
    ((LogMergePolicy) cfg.getMergePolicy()).setUseCompoundFile(false);
    ((LogMergePolicy) cfg.getMergePolicy()).setUseCompoundDocStore(false);
    IndexWriter writer = new IndexWriter(dir, cfg);
    Document doc = new Document();
    doc.add(new Field("f", text, Store.YES, Index.ANALYZED, TermVector.WITH_POSITIONS_OFFSETS));
    writer.addDocument(doc);
    writer.commit();
    writer.addDocument(doc);
    writer.optimize();
    writer.close();

    IndexReader reader = IndexReader.open(dir, null, true, 1, new AppendingCodecProvider());
    assertEquals(2, reader.numDocs());
    doc = reader.document(0);
    assertEquals(text, doc.get("f"));

    Fields fields = MultiFields.getFields(reader);
    Terms terms = fields.terms("f");
    assertNotNull(terms);
    TermsEnum te = terms.iterator();
    assertEquals(SeekStatus.FOUND, te.seek(new BytesRef("quick")));
    assertEquals(SeekStatus.FOUND, te.seek(new BytesRef("brown")));
    assertEquals(SeekStatus.FOUND, te.seek(new BytesRef("fox")));
    assertEquals(SeekStatus.FOUND, te.seek(new BytesRef("jumped")));
    assertEquals(SeekStatus.FOUND, te.seek(new BytesRef("over")));
    assertEquals(SeekStatus.FOUND, te.seek(new BytesRef("lazy")));
    assertEquals(SeekStatus.FOUND, te.seek(new BytesRef("dog")));
    assertEquals(SeekStatus.FOUND, te.seek(new BytesRef("the")));

    // Enum is positioned on "the": it appears twice per document, and both
    // docs (ids 0 and 1) must be present; advancing past the last id ends.
    DocsEnum de = te.docs(null, null);
    assertTrue(de.advance(0) != DocsEnum.NO_MORE_DOCS);
    assertEquals(2, de.freq());
    assertTrue(de.advance(1) != DocsEnum.NO_MORE_DOCS);
    assertTrue(de.advance(2) == DocsEnum.NO_MORE_DOCS);
    reader.close();
    // FIX: the directory was never closed before. Closing it releases the
    // RAM-backed store; MockDirectoryWrapper.close() presumably also checks
    // for unreleased handles — confirm against the test-framework version.
    dir.close();
}
}