package org.apache.hadoop.hive.mastiffFlexibleEncoding.parquet;
/*
 * Adapted from Parquet's ByteBitPackingValuesReader.
 */
import java.io.IOException;
import java.util.Arrays;
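/**
 * Reads integers that were bit-packed at a fixed bit width, decoding them
 * eight values at a time with {@code unpack8Values()} and buffering the result
 * in {@code decoded}.
 *
 * <p>Illustrative usage sketch only; it assumes the adapted {@code Packer}
 * enum keeps upstream Parquet's {@code LITTLE_ENDIAN} constant and that the
 * caller supplies a valid page buffer and offset:</p>
 *
 * <pre>{@code
 * ByteBitPackingValuesReader reader =
 *     new ByteBitPackingValuesReader(maxValue, Packer.LITTLE_ENDIAN);
 * reader.initFromPage(valueCount, pageBytes, offset);
 * int first = reader.readInteger();            // unpacks the first group of 8 on demand
 * int nextReaderStart = reader.getNextOffset(); // where the next reader begins
 * }</pre>
 */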
public class ByteBitPackingValuesReader extends ValuesReader {
  private static final int VALUES_AT_A_TIME = 8; // because we're using unpack8Values()
  private static final Log LOG = Log.getLog(ByteBitPackingValuesReader.class);

  private final int bitWidth;
  private final BytePacker packer;
  private final int[] decoded = new int[VALUES_AT_A_TIME];
  private int decodedPosition = VALUES_AT_A_TIME - 1; // start "exhausted" so the first read unpacks
  private byte[] encoded;   // the current page buffer
  private int encodedPos;   // byte position of the next group to unpack
  private int nextOffset;   // first byte after this reader's data in the page
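
  /**
   * @param bound  the maximum value to be decoded; the bit width is derived
   *               from it via {@code BytesUtils.getWidthFromMaxInt(bound)}
   * @param packer the packing scheme used to create a {@code BytePacker}
   *               for that width
   */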
  public ByteBitPackingValuesReader(int bound, Packer packer) {
    this.bitWidth = BytesUtils.getWidthFromMaxInt(bound);
    this.packer = packer.newBytePacker(bitWidth);
  }
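
  /**
   * Returns the next value, unpacking a new group of eight values from the
   * page whenever the current {@code decoded} buffer is exhausted.
   */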
  @Override
  public int readInteger() {
    ++decodedPosition;
    if (decodedPosition == decoded.length) {
      if (encodedPos + bitWidth > encoded.length) {
        // Fewer than bitWidth bytes remain in the page: copyOfRange zero-pads
        // the tail so unpack8Values can still decode a full group of eight.
        packer.unpack8Values(Arrays.copyOfRange(encoded, encodedPos, encodedPos + bitWidth), 0, decoded, 0);
      } else {
        packer.unpack8Values(encoded, encodedPos, decoded, 0);
      }
      encodedPos += bitWidth;
      decodedPosition = 0;
    }
    return decoded[decodedPosition];
  }
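
  /**
   * Positions this reader over its slice of the page: {@code valueCount}
   * values at {@code bitWidth} bits each, rounded up to whole bytes, starting
   * at {@code offset}. The end of that slice is remembered as the offset of
   * the next reader.
   */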
  @Override
  public void initFromPage(int valueCount, byte[] page, int offset)
      throws IOException {
    int effectiveBitLength = valueCount * bitWidth;
    int length = BytesUtils.paddedByteCountFromBits(effectiveBitLength); // ceil to whole bytes
    if (Log.DEBUG) {
      LOG.debug("reading " + length + " bytes for " + valueCount + " values of size " + bitWidth + " bits.");
    }
    this.encoded = page;
    this.encodedPos = offset;
    this.decodedPosition = VALUES_AT_A_TIME - 1; // force an unpack on the first read
    this.nextOffset = offset + length;
  }
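
  /**
   * Returns the offset of the first byte after this reader's data, as computed
   * in {@link #initFromPage(int, byte[], int)}.
   */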
  @Override
  public int getNextOffset() {
    return nextOffset;
  }
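
  /** Skips one value by reading and discarding it. */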
  @Override
  public void skip() {
    readInteger();
  }
}