// Generated by the protocol buffer compiler. DO NOT EDIT! // source: HFile.proto package org.apache.hadoop.hbase.shaded.protobuf.generated; public final class HFileProtos { private HFileProtos() {} public static void registerAllExtensions( org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite registry) { } public static void registerAllExtensions( org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry registry) { registerAllExtensions( (org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite) registry); } public interface FileInfoProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.FileInfoProto) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair> getMapEntryList(); /** * <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair getMapEntry(int index); /** * <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code> */ int getMapEntryCount(); /** * <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code> */ java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> getMapEntryOrBuilderList(); /** * <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getMapEntryOrBuilder( int index); } /** * <pre> * Map of name/values * </pre> * * Protobuf type {@code hbase.pb.FileInfoProto} */ public static final class FileInfoProto extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.FileInfoProto) FileInfoProtoOrBuilder { // Use FileInfoProto.newBuilder() to construct. 
    private FileInfoProto(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    // No-arg constructor for the default instance: empty, immutable map_entry list.
    private FileInfoProto() {
      mapEntry_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
    getUnknownFields() {
      return this.unknownFields;
    }

    // Wire-format parsing constructor; unrecognized tags are preserved in unknownFields.
    private FileInfoProto(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      this();
      int mutable_bitField0_ = 0;
      org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // First map_entry element: swap the shared empty list for a mutable one.
              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                mapEntry_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair>();
                mutable_bitField0_ |= 0x00000001;
              }
              mapEntry_.add(
                  input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.PARSER, extensionRegistry));
              break;
            }
          }
        }
      } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
            e).setUnfinishedMessage(this);
      } finally {
        // Seal the list (even on error paths) so the partially-built message is immutable.
        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
          mapEntry_ = java.util.Collections.unmodifiableList(mapEntry_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileInfoProto_descriptor;
    }

    protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileInfoProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto.Builder.class);
    }

    public static final int MAP_ENTRY_FIELD_NUMBER = 1;
    private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair> mapEntry_;
    /** <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code> */
    public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair> getMapEntryList() {
      return mapEntry_;
    }
    /** <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code> */
    public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>
        getMapEntryOrBuilderList() {
      return mapEntry_;
    }
    /** <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code> */
    public int getMapEntryCount() {
      return mapEntry_.size();
    }
    /** <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code> */
    public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair getMapEntry(int index) {
      return mapEntry_.get(index);
    }
    /** <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code> */
    public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getMapEntryOrBuilder(
        int index) {
      return mapEntry_.get(index);
    }

    // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;
      for (int i = 0; i < getMapEntryCount(); i++) {
        if (!getMapEntry(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < mapEntry_.size(); i++) {
        output.writeMessage(1, mapEntry_.get(i));
      }
      unknownFields.writeTo(output);
    }

    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < mapEntry_.size(); i++) {
        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, mapEntry_.get(i));
      }
      size += unknownFields.getSerializedSize();
      memoizedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto other = (org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto) obj;

      boolean result = true;
      result = result && getMapEntryList()
          .equals(other.getMapEntryList());
      result = result && unknownFields.equals(other.unknownFields);
      return result;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getMapEntryCount() > 0) {
        hash = (37 * hash) + MAP_ENTRY_FIELD_NUMBER;
        hash = (53 * hash) + getMapEntryList().hashCode();
      }
      hash = (29 * hash) + unknownFields.hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // ---- Standard generated parseFrom()/parseDelimitedFrom() entry points. ----
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto parseFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto parseFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto parseFrom(byte[] data)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto parseFrom(
        byte[] data,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto parseFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto parseFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * <pre>
     * Map of name/values
     * </pre>
     *
     * Protobuf type {@code hbase.pb.FileInfoProto}
     */
    public static final class Builder extends
        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hbase.pb.FileInfoProto)
        org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProtoOrBuilder {
      public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileInfoProto_descriptor;
      }

      protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileInfoProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      private Builder(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the repeated-field builder when nested-builder support is on.
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getMapEntryFieldBuilder();
        }
      }
      public Builder clear() {
        super.clear();
        if (mapEntryBuilder_ == null) {
          mapEntry_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          mapEntryBuilder_.clear();
        }
        return this;
      }

      public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileInfoProto_descriptor;
      }

      public org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto build() {
        org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto buildPartial() {
        org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto result = new org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto(this);
        int from_bitField0_ = bitField0_;
        if (mapEntryBuilder_ == null) {
          // Hand the (now sealed) list to the message; builder must not mutate it afterwards.
          if (((bitField0_ & 0x00000001) == 0x00000001)) {
            mapEntry_ = java.util.Collections.unmodifiableList(mapEntry_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.mapEntry_ = mapEntry_;
        } else {
          result.mapEntry_ = mapEntryBuilder_.build();
        }
        onBuilt();
        return result;
      }

      public Builder clone() {
        return (Builder) super.clone();
      }
      public Builder setField(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
          Object value) {
        return (Builder) super.setField(field, value);
      }
      public Builder clearField(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
        return (Builder) super.clearField(field);
      }
      public Builder clearOneof(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
        return (Builder) super.clearOneof(oneof);
      }
      public Builder setRepeatedField(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
          int index, Object value) {
        return (Builder) super.setRepeatedField(field, index, value);
      }
      public Builder addRepeatedField(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
          Object value) {
        return (Builder) super.addRepeatedField(field, value);
      }
      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto) {
          return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto other) {
        if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto.getDefaultInstance()) return this;
        if (mapEntryBuilder_ == null) {
          if (!other.mapEntry_.isEmpty()) {
            if (mapEntry_.isEmpty()) {
              // Our list is empty: share other's immutable list instead of copying.
              mapEntry_ = other.mapEntry_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureMapEntryIsMutable();
              mapEntry_.addAll(other.mapEntry_);
            }
            onChanged();
          }
        } else {
          if (!other.mapEntry_.isEmpty()) {
            if (mapEntryBuilder_.isEmpty()) {
              mapEntryBuilder_.dispose();
              mapEntryBuilder_ = null;
              mapEntry_ = other.mapEntry_;
              bitField0_ = (bitField0_ & ~0x00000001);
              mapEntryBuilder_ =
                org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getMapEntryFieldBuilder() : null;
            } else {
              mapEntryBuilder_.addAllMessages(other.mapEntry_);
            }
          }
        }
        this.mergeUnknownFields(other.unknownFields);
        onChanged();
        return this;
      }

      public final boolean isInitialized() {
        for (int i = 0; i < getMapEntryCount(); i++) {
          if (!getMapEntry(i).isInitialized()) {
            return false;
          }
        }
        return true;
      }

      public Builder mergeFrom(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
          org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
          // Keep whatever was parsed before the failure, then rethrow the IO cause.
          parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto) e.getUnfinishedMessage();
          throw e.unwrapIOException();
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair> mapEntry_ =
        java.util.Collections.emptyList();
      // Copy-on-write: bit 0 of bitField0_ tracks whether mapEntry_ is our own mutable copy.
      private void ensureMapEntryIsMutable() {
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          mapEntry_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair>(mapEntry_);
          bitField0_ |= 0x00000001;
         }
      }

      private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> mapEntryBuilder_;

      /** <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code> */
      public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair> getMapEntryList() {
        if (mapEntryBuilder_ == null) {
          return java.util.Collections.unmodifiableList(mapEntry_);
        } else {
          return mapEntryBuilder_.getMessageList();
        }
      }
      /** <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code> */
      public int getMapEntryCount() {
        if (mapEntryBuilder_ == null) {
          return mapEntry_.size();
        } else {
          return mapEntryBuilder_.getCount();
        }
      }
      /** <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code> */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair getMapEntry(int index) {
        if (mapEntryBuilder_ == null) {
          return mapEntry_.get(index);
        } else {
          return mapEntryBuilder_.getMessage(index);
        }
      }
      /** <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code> */
      public Builder setMapEntry(
          int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair value) {
        if (mapEntryBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureMapEntryIsMutable();
          mapEntry_.set(index, value);
          onChanged();
        } else {
          mapEntryBuilder_.setMessage(index, value);
        }
        return this;
      }
      /** <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code> */
      public Builder setMapEntry(
          int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) {
        if (mapEntryBuilder_ == null) {
          ensureMapEntryIsMutable();
          mapEntry_.set(index, builderForValue.build());
          onChanged();
        } else {
          mapEntryBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /** <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code> */
      public Builder addMapEntry(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair value) {
        if (mapEntryBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureMapEntryIsMutable();
          mapEntry_.add(value);
          onChanged();
        } else {
          mapEntryBuilder_.addMessage(value);
        }
        return this;
      }
      /** <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code> */
      public Builder addMapEntry(
          int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair value) {
        if (mapEntryBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureMapEntryIsMutable();
          mapEntry_.add(index, value);
          onChanged();
        } else {
          mapEntryBuilder_.addMessage(index, value);
        }
        return this;
      }
      /** <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code> */
      public Builder addMapEntry(
          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) {
        if (mapEntryBuilder_ == null) {
          ensureMapEntryIsMutable();
          mapEntry_.add(builderForValue.build());
          onChanged();
        } else {
          mapEntryBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /** <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code> */
      public Builder addMapEntry(
          int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) {
        if (mapEntryBuilder_ == null) {
          ensureMapEntryIsMutable();
          mapEntry_.add(index, builderForValue.build());
          onChanged();
        } else {
          mapEntryBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /** <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code> */
      public Builder addAllMapEntry(
          java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair> values) {
        if (mapEntryBuilder_ == null) {
          ensureMapEntryIsMutable();
          org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll(
              values, mapEntry_);
          onChanged();
        } else {
          mapEntryBuilder_.addAllMessages(values);
        }
        return this;
      }
      /** <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code> */
      public Builder clearMapEntry() {
        if (mapEntryBuilder_ == null) {
          mapEntry_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          mapEntryBuilder_.clear();
        }
        return this;
      }
      /** <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code> */
      public Builder removeMapEntry(int index) {
        if (mapEntryBuilder_ == null) {
          ensureMapEntryIsMutable();
          mapEntry_.remove(index);
          onChanged();
        } else {
          mapEntryBuilder_.remove(index);
        }
        return this;
      }
      /** <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code> */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder getMapEntryBuilder(
          int index) {
        return getMapEntryFieldBuilder().getBuilder(index);
      }
      /** <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code> */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getMapEntryOrBuilder(
          int index) {
        if (mapEntryBuilder_ == null) {
          return mapEntry_.get(index);
        } else {
          return mapEntryBuilder_.getMessageOrBuilder(index);
        }
      }
      /** <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code> */
      public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>
           getMapEntryOrBuilderList() {
        if (mapEntryBuilder_ != null) {
          return mapEntryBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(mapEntry_);
        }
      }
      /** <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code> */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addMapEntryBuilder() {
        return getMapEntryFieldBuilder().addBuilder(
            org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance());
      }
      /** <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code> */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addMapEntryBuilder(
          int index) {
        return getMapEntryFieldBuilder().addBuilder(
            index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance());
      }
      /** <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code> */
      public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder>
           getMapEntryBuilderList() {
        return getMapEntryFieldBuilder().getBuilderList();
      }
      // Lazily created; once built, mapEntry_ ownership transfers to the builder.
      private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>
          getMapEntryFieldBuilder() {
        if (mapEntryBuilder_ == null) {
          mapEntryBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>(
                  mapEntry_,
                  ((bitField0_ & 0x00000001) == 0x00000001),
                  getParentForChildren(),
                  isClean());
          mapEntry_ = null;
        }
        return mapEntryBuilder_;
      }
      public final Builder setUnknownFields(
          final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      public final Builder mergeUnknownFields(
          final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hbase.pb.FileInfoProto)
    }

    // @@protoc_insertion_point(class_scope:hbase.pb.FileInfoProto)
    private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto();
    }

    public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    // Deprecated in this protobuf generation; prefer parser().
    @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FileInfoProto>
        PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<FileInfoProto>() {
      public FileInfoProto parsePartialFrom(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
          org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
          return new FileInfoProto(input, extensionRegistry);
      }
    };

    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FileInfoProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FileInfoProto> getParserForType() {
      return PARSER;
    }

    public org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  /**
   * Read-only accessor view shared by {@code FileTrailerProto} and its {@code Builder}.
   */
  public interface FileTrailerProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hbase.pb.FileTrailerProto)
      org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {

    /** <code>optional uint64 file_info_offset = 1;</code> */
    boolean hasFileInfoOffset();
    /** <code>optional uint64 file_info_offset = 1;</code> */
    long getFileInfoOffset();

    /** <code>optional uint64 load_on_open_data_offset = 2;</code> */
    boolean hasLoadOnOpenDataOffset();
    /** <code>optional uint64 load_on_open_data_offset = 2;</code> */
    long getLoadOnOpenDataOffset();

    /** <code>optional uint64 uncompressed_data_index_size = 3;</code> */
    boolean hasUncompressedDataIndexSize();
    /** <code>optional uint64 uncompressed_data_index_size = 3;</code> */
    long getUncompressedDataIndexSize();

    /** <code>optional uint64 total_uncompressed_bytes = 4;</code> */
    boolean hasTotalUncompressedBytes();
    /** <code>optional uint64 total_uncompressed_bytes = 4;</code> */
    long getTotalUncompressedBytes();

    /** <code>optional uint32 data_index_count = 5;</code> */
    boolean hasDataIndexCount();
    /** <code>optional uint32 data_index_count = 5;</code> */
    int getDataIndexCount();

    /** <code>optional uint32 meta_index_count = 6;</code> */
    boolean hasMetaIndexCount();
    /** <code>optional uint32 meta_index_count = 6;</code> */
    int getMetaIndexCount();

    /** <code>optional uint64 entry_count = 7;</code> */
    boolean hasEntryCount();
    /** <code>optional uint64 entry_count = 7;</code> */
    long getEntryCount();

    /** <code>optional uint32 num_data_index_levels = 8;</code> */
    boolean hasNumDataIndexLevels();
    /** <code>optional uint32 num_data_index_levels = 8;</code> */
    int getNumDataIndexLevels();

    /** <code>optional uint64 first_data_block_offset = 9;</code> */
    boolean hasFirstDataBlockOffset();
    /** <code>optional uint64 first_data_block_offset = 9;</code> */
    long getFirstDataBlockOffset();

    /** <code>optional uint64 last_data_block_offset = 10;</code> */
    boolean hasLastDataBlockOffset();
    /** <code>optional uint64 last_data_block_offset = 10;</code> */
    long getLastDataBlockOffset();

    /** <code>optional string comparator_class_name = 11;</code> */
    boolean hasComparatorClassName();
    /** <code>optional string comparator_class_name = 11;</code> */
    java.lang.String getComparatorClassName();
    /** <code>optional string comparator_class_name = 11;</code> */
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
        getComparatorClassNameBytes();

    /** <code>optional uint32 compression_codec = 12;</code> */
    boolean hasCompressionCodec();
    /** <code>optional uint32 compression_codec = 12;</code> */
    int getCompressionCodec();

    /** <code>optional bytes encryption_key = 13;</code> */
    boolean hasEncryptionKey();
    /** <code>optional bytes encryption_key = 13;</code> */
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getEncryptionKey();
  }
  /**
   * <pre>
   * HFile file trailer
   * </pre>
   *
   * Protobuf type {@code hbase.pb.FileTrailerProto}
   */
  public static final class FileTrailerProto extends
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hbase.pb.FileTrailerProto)
      FileTrailerProtoOrBuilder {
    // Use FileTrailerProto.newBuilder() to construct.
// Builder-based constructor; invoked by Builder.buildPartial().
private FileTrailerProto(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// No-arg constructor: initializes every field to its proto2 default
// (0/0L for numerics, "" for the string field, EMPTY for the bytes field).
private FileTrailerProto() {
  fileInfoOffset_ = 0L;
  loadOnOpenDataOffset_ = 0L;
  uncompressedDataIndexSize_ = 0L;
  totalUncompressedBytes_ = 0L;
  dataIndexCount_ = 0;
  metaIndexCount_ = 0;
  entryCount_ = 0L;
  numDataIndexLevels_ = 0;
  firstDataBlockOffset_ = 0L;
  lastDataBlockOffset_ = 0L;
  comparatorClassName_ = "";
  compressionCodec_ = 0;
  encryptionKey_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
}

@java.lang.Override
public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
    getUnknownFields() {
  return this.unknownFields;
}

// Wire-format parsing constructor. Reads tag/value pairs until end of stream
// (tag == 0), setting the matching presence bit in bitField0_ for each
// optional field encountered; unrecognized tags are preserved in
// unknownFields. Each case label is (field_number << 3) | wire_type,
// e.g. case 8 == field 1 as a varint, case 90 == field 11 length-delimited.
private FileTrailerProto(
    org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
  this();
  int mutable_bitField0_ = 0;
  org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          // End of input.
          done = true;
          break;
        default: {
          // Unknown field: keep it round-trippable via unknownFields.
          if (!parseUnknownField(input, unknownFields,
                                 extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
        case 8: {
          bitField0_ |= 0x00000001;
          fileInfoOffset_ = input.readUInt64();
          break;
        }
        case 16: {
          bitField0_ |= 0x00000002;
          loadOnOpenDataOffset_ = input.readUInt64();
          break;
        }
        case 24: {
          bitField0_ |= 0x00000004;
          uncompressedDataIndexSize_ = input.readUInt64();
          break;
        }
        case 32: {
          bitField0_ |= 0x00000008;
          totalUncompressedBytes_ = input.readUInt64();
          break;
        }
        case 40: {
          bitField0_ |= 0x00000010;
          dataIndexCount_ = input.readUInt32();
          break;
        }
        case 48: {
          bitField0_ |= 0x00000020;
          metaIndexCount_ = input.readUInt32();
          break;
        }
        case 56: {
          bitField0_ |= 0x00000040;
          entryCount_ = input.readUInt64();
          break;
        }
        case 64: {
          bitField0_ |= 0x00000080;
          numDataIndexLevels_ = input.readUInt32();
          break;
        }
        case 72: {
          bitField0_ |= 0x00000100;
          firstDataBlockOffset_ = input.readUInt64();
          break;
        }
        case 80: {
          bitField0_ |= 0x00000200;
          lastDataBlockOffset_ = input.readUInt64();
          break;
        }
        case 90: {
          // String field stored lazily as ByteString; decoded on first get.
          org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes();
          bitField0_ |= 0x00000400;
          comparatorClassName_ = bs;
          break;
        }
        case 96: {
          bitField0_ |= 0x00000800;
          compressionCodec_ = input.readUInt32();
          break;
        }
        case 106: {
          bitField0_ |= 0x00001000;
          encryptionKey_ = input.readBytes();
          break;
        }
      }
    }
  } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
    // Attach the partially parsed message so callers can inspect it.
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
        e).setUnfinishedMessage(this);
  } finally {
    // Always freeze unknown fields/extensions, even on failure.
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}

// Descriptor for hbase.pb.FileTrailerProto (from HFile.proto).
public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileTrailerProto_descriptor;
}

protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileTrailerProto_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto.Builder.class);
}

// Presence bitmap: bit N-1 set <=> optional field N was explicitly set.
private int bitField0_;
public static final int FILE_INFO_OFFSET_FIELD_NUMBER = 1;
private long fileInfoOffset_;
/**
 * <code>optional uint64 file_info_offset = 1;</code>
 */
public boolean hasFileInfoOffset() {
  return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 *
<code>optional uint64 file_info_offset = 1;</code> */ public long getFileInfoOffset() { return fileInfoOffset_; } public static final int LOAD_ON_OPEN_DATA_OFFSET_FIELD_NUMBER = 2; private long loadOnOpenDataOffset_; /** * <code>optional uint64 load_on_open_data_offset = 2;</code> */ public boolean hasLoadOnOpenDataOffset() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional uint64 load_on_open_data_offset = 2;</code> */ public long getLoadOnOpenDataOffset() { return loadOnOpenDataOffset_; } public static final int UNCOMPRESSED_DATA_INDEX_SIZE_FIELD_NUMBER = 3; private long uncompressedDataIndexSize_; /** * <code>optional uint64 uncompressed_data_index_size = 3;</code> */ public boolean hasUncompressedDataIndexSize() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional uint64 uncompressed_data_index_size = 3;</code> */ public long getUncompressedDataIndexSize() { return uncompressedDataIndexSize_; } public static final int TOTAL_UNCOMPRESSED_BYTES_FIELD_NUMBER = 4; private long totalUncompressedBytes_; /** * <code>optional uint64 total_uncompressed_bytes = 4;</code> */ public boolean hasTotalUncompressedBytes() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional uint64 total_uncompressed_bytes = 4;</code> */ public long getTotalUncompressedBytes() { return totalUncompressedBytes_; } public static final int DATA_INDEX_COUNT_FIELD_NUMBER = 5; private int dataIndexCount_; /** * <code>optional uint32 data_index_count = 5;</code> */ public boolean hasDataIndexCount() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>optional uint32 data_index_count = 5;</code> */ public int getDataIndexCount() { return dataIndexCount_; } public static final int META_INDEX_COUNT_FIELD_NUMBER = 6; private int metaIndexCount_; /** * <code>optional uint32 meta_index_count = 6;</code> */ public boolean hasMetaIndexCount() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <code>optional uint32 
meta_index_count = 6;</code> */ public int getMetaIndexCount() { return metaIndexCount_; } public static final int ENTRY_COUNT_FIELD_NUMBER = 7; private long entryCount_; /** * <code>optional uint64 entry_count = 7;</code> */ public boolean hasEntryCount() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** * <code>optional uint64 entry_count = 7;</code> */ public long getEntryCount() { return entryCount_; } public static final int NUM_DATA_INDEX_LEVELS_FIELD_NUMBER = 8; private int numDataIndexLevels_; /** * <code>optional uint32 num_data_index_levels = 8;</code> */ public boolean hasNumDataIndexLevels() { return ((bitField0_ & 0x00000080) == 0x00000080); } /** * <code>optional uint32 num_data_index_levels = 8;</code> */ public int getNumDataIndexLevels() { return numDataIndexLevels_; } public static final int FIRST_DATA_BLOCK_OFFSET_FIELD_NUMBER = 9; private long firstDataBlockOffset_; /** * <code>optional uint64 first_data_block_offset = 9;</code> */ public boolean hasFirstDataBlockOffset() { return ((bitField0_ & 0x00000100) == 0x00000100); } /** * <code>optional uint64 first_data_block_offset = 9;</code> */ public long getFirstDataBlockOffset() { return firstDataBlockOffset_; } public static final int LAST_DATA_BLOCK_OFFSET_FIELD_NUMBER = 10; private long lastDataBlockOffset_; /** * <code>optional uint64 last_data_block_offset = 10;</code> */ public boolean hasLastDataBlockOffset() { return ((bitField0_ & 0x00000200) == 0x00000200); } /** * <code>optional uint64 last_data_block_offset = 10;</code> */ public long getLastDataBlockOffset() { return lastDataBlockOffset_; } public static final int COMPARATOR_CLASS_NAME_FIELD_NUMBER = 11; private volatile java.lang.Object comparatorClassName_; /** * <code>optional string comparator_class_name = 11;</code> */ public boolean hasComparatorClassName() { return ((bitField0_ & 0x00000400) == 0x00000400); } /** * <code>optional string comparator_class_name = 11;</code> */ public java.lang.String 
getComparatorClassName() { java.lang.Object ref = comparatorClassName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { comparatorClassName_ = s; } return s; } } /** * <code>optional string comparator_class_name = 11;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getComparatorClassNameBytes() { java.lang.Object ref = comparatorClassName_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); comparatorClassName_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int COMPRESSION_CODEC_FIELD_NUMBER = 12; private int compressionCodec_; /** * <code>optional uint32 compression_codec = 12;</code> */ public boolean hasCompressionCodec() { return ((bitField0_ & 0x00000800) == 0x00000800); } /** * <code>optional uint32 compression_codec = 12;</code> */ public int getCompressionCodec() { return compressionCodec_; } public static final int ENCRYPTION_KEY_FIELD_NUMBER = 13; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString encryptionKey_; /** * <code>optional bytes encryption_key = 13;</code> */ public boolean hasEncryptionKey() { return ((bitField0_ & 0x00001000) == 0x00001000); } /** * <code>optional bytes encryption_key = 13;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getEncryptionKey() { return encryptionKey_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; 
  return true;
}

// Serializes each field that has its presence bit set, in ascending
// field-number order, then appends any preserved unknown fields.
public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
                    throws java.io.IOException {
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    output.writeUInt64(1, fileInfoOffset_);
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    output.writeUInt64(2, loadOnOpenDataOffset_);
  }
  if (((bitField0_ & 0x00000004) == 0x00000004)) {
    output.writeUInt64(3, uncompressedDataIndexSize_);
  }
  if (((bitField0_ & 0x00000008) == 0x00000008)) {
    output.writeUInt64(4, totalUncompressedBytes_);
  }
  if (((bitField0_ & 0x00000010) == 0x00000010)) {
    output.writeUInt32(5, dataIndexCount_);
  }
  if (((bitField0_ & 0x00000020) == 0x00000020)) {
    output.writeUInt32(6, metaIndexCount_);
  }
  if (((bitField0_ & 0x00000040) == 0x00000040)) {
    output.writeUInt64(7, entryCount_);
  }
  if (((bitField0_ & 0x00000080) == 0x00000080)) {
    output.writeUInt32(8, numDataIndexLevels_);
  }
  if (((bitField0_ & 0x00000100) == 0x00000100)) {
    output.writeUInt64(9, firstDataBlockOffset_);
  }
  if (((bitField0_ & 0x00000200) == 0x00000200)) {
    output.writeUInt64(10, lastDataBlockOffset_);
  }
  if (((bitField0_ & 0x00000400) == 0x00000400)) {
    // Handles either String or lazily decoded ByteString storage.
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 11, comparatorClassName_);
  }
  if (((bitField0_ & 0x00000800) == 0x00000800)) {
    output.writeUInt32(12, compressionCodec_);
  }
  if (((bitField0_ & 0x00001000) == 0x00001000)) {
    output.writeBytes(13, encryptionKey_);
  }
  unknownFields.writeTo(output);
}

// Computes (and memoizes) the exact serialized byte size; mirrors writeTo,
// summing one compute*Size per set field plus the unknown-field bytes.
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;  // already computed

  size = 0;
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
      .computeUInt64Size(1, fileInfoOffset_);
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
      .computeUInt64Size(2, loadOnOpenDataOffset_);
  }
  if (((bitField0_ & 0x00000004) == 0x00000004)) {
    size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
      .computeUInt64Size(3, uncompressedDataIndexSize_);
  }
  if (((bitField0_ & 0x00000008) == 0x00000008)) {
    size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
      .computeUInt64Size(4, totalUncompressedBytes_);
  }
  if (((bitField0_ & 0x00000010) == 0x00000010)) {
    size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
      .computeUInt32Size(5, dataIndexCount_);
  }
  if (((bitField0_ & 0x00000020) == 0x00000020)) {
    size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
      .computeUInt32Size(6, metaIndexCount_);
  }
  if (((bitField0_ & 0x00000040) == 0x00000040)) {
    size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
      .computeUInt64Size(7, entryCount_);
  }
  if (((bitField0_ & 0x00000080) == 0x00000080)) {
    size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
      .computeUInt32Size(8, numDataIndexLevels_);
  }
  if (((bitField0_ & 0x00000100) == 0x00000100)) {
    size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
      .computeUInt64Size(9, firstDataBlockOffset_);
  }
  if (((bitField0_ & 0x00000200) == 0x00000200)) {
    size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
      .computeUInt64Size(10, lastDataBlockOffset_);
  }
  if (((bitField0_ & 0x00000400) == 0x00000400)) {
    size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(11, comparatorClassName_);
  }
  if (((bitField0_ & 0x00000800) == 0x00000800)) {
    size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
      .computeUInt32Size(12, compressionCodec_);
  }
  if (((bitField0_ & 0x00001000) == 0x00001000)) {
    size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
      .computeBytesSize(13, encryptionKey_);
  }
  size += unknownFields.getSerializedSize();
  memoizedSize = size;
  return size;
}

private static final long serialVersionUID = 0L;
@java.lang.Override
public boolean
equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto other = (org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto) obj; boolean result = true; result = result && (hasFileInfoOffset() == other.hasFileInfoOffset()); if (hasFileInfoOffset()) { result = result && (getFileInfoOffset() == other.getFileInfoOffset()); } result = result && (hasLoadOnOpenDataOffset() == other.hasLoadOnOpenDataOffset()); if (hasLoadOnOpenDataOffset()) { result = result && (getLoadOnOpenDataOffset() == other.getLoadOnOpenDataOffset()); } result = result && (hasUncompressedDataIndexSize() == other.hasUncompressedDataIndexSize()); if (hasUncompressedDataIndexSize()) { result = result && (getUncompressedDataIndexSize() == other.getUncompressedDataIndexSize()); } result = result && (hasTotalUncompressedBytes() == other.hasTotalUncompressedBytes()); if (hasTotalUncompressedBytes()) { result = result && (getTotalUncompressedBytes() == other.getTotalUncompressedBytes()); } result = result && (hasDataIndexCount() == other.hasDataIndexCount()); if (hasDataIndexCount()) { result = result && (getDataIndexCount() == other.getDataIndexCount()); } result = result && (hasMetaIndexCount() == other.hasMetaIndexCount()); if (hasMetaIndexCount()) { result = result && (getMetaIndexCount() == other.getMetaIndexCount()); } result = result && (hasEntryCount() == other.hasEntryCount()); if (hasEntryCount()) { result = result && (getEntryCount() == other.getEntryCount()); } result = result && (hasNumDataIndexLevels() == other.hasNumDataIndexLevels()); if (hasNumDataIndexLevels()) { result = result && (getNumDataIndexLevels() == other.getNumDataIndexLevels()); } result = result && (hasFirstDataBlockOffset() == other.hasFirstDataBlockOffset()); if 
(hasFirstDataBlockOffset()) { result = result && (getFirstDataBlockOffset() == other.getFirstDataBlockOffset()); } result = result && (hasLastDataBlockOffset() == other.hasLastDataBlockOffset()); if (hasLastDataBlockOffset()) { result = result && (getLastDataBlockOffset() == other.getLastDataBlockOffset()); } result = result && (hasComparatorClassName() == other.hasComparatorClassName()); if (hasComparatorClassName()) { result = result && getComparatorClassName() .equals(other.getComparatorClassName()); } result = result && (hasCompressionCodec() == other.hasCompressionCodec()); if (hasCompressionCodec()) { result = result && (getCompressionCodec() == other.getCompressionCodec()); } result = result && (hasEncryptionKey() == other.hasEncryptionKey()); if (hasEncryptionKey()) { result = result && getEncryptionKey() .equals(other.getEncryptionKey()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasFileInfoOffset()) { hash = (37 * hash) + FILE_INFO_OFFSET_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( getFileInfoOffset()); } if (hasLoadOnOpenDataOffset()) { hash = (37 * hash) + LOAD_ON_OPEN_DATA_OFFSET_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( getLoadOnOpenDataOffset()); } if (hasUncompressedDataIndexSize()) { hash = (37 * hash) + UNCOMPRESSED_DATA_INDEX_SIZE_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( getUncompressedDataIndexSize()); } if (hasTotalUncompressedBytes()) { hash = (37 * hash) + TOTAL_UNCOMPRESSED_BYTES_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( getTotalUncompressedBytes()); } if (hasDataIndexCount()) { hash = (37 * 
hash) + DATA_INDEX_COUNT_FIELD_NUMBER; hash = (53 * hash) + getDataIndexCount(); } if (hasMetaIndexCount()) { hash = (37 * hash) + META_INDEX_COUNT_FIELD_NUMBER; hash = (53 * hash) + getMetaIndexCount(); } if (hasEntryCount()) { hash = (37 * hash) + ENTRY_COUNT_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( getEntryCount()); } if (hasNumDataIndexLevels()) { hash = (37 * hash) + NUM_DATA_INDEX_LEVELS_FIELD_NUMBER; hash = (53 * hash) + getNumDataIndexLevels(); } if (hasFirstDataBlockOffset()) { hash = (37 * hash) + FIRST_DATA_BLOCK_OFFSET_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( getFirstDataBlockOffset()); } if (hasLastDataBlockOffset()) { hash = (37 * hash) + LAST_DATA_BLOCK_OFFSET_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( getLastDataBlockOffset()); } if (hasComparatorClassName()) { hash = (37 * hash) + COMPARATOR_CLASS_NAME_FIELD_NUMBER; hash = (53 * hash) + getComparatorClassName().hashCode(); } if (hasCompressionCodec()) { hash = (37 * hash) + COMPRESSION_CODEC_FIELD_NUMBER; hash = (53 * hash) + getCompressionCodec(); } if (hasEncryptionKey()) { hash = (37 * hash) + ENCRYPTION_KEY_FIELD_NUMBER; hash = (53 * hash) + getEncryptionKey().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) 
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * HFile file trailer * </pre> * * Protobuf type {@code hbase.pb.FileTrailerProto} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.FileTrailerProto) org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProtoOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileTrailerProto_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileTrailerProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); fileInfoOffset_ = 0L; bitField0_ = (bitField0_ & ~0x00000001); loadOnOpenDataOffset_ = 0L; 
bitField0_ = (bitField0_ & ~0x00000002); uncompressedDataIndexSize_ = 0L; bitField0_ = (bitField0_ & ~0x00000004); totalUncompressedBytes_ = 0L; bitField0_ = (bitField0_ & ~0x00000008); dataIndexCount_ = 0; bitField0_ = (bitField0_ & ~0x00000010); metaIndexCount_ = 0; bitField0_ = (bitField0_ & ~0x00000020); entryCount_ = 0L; bitField0_ = (bitField0_ & ~0x00000040); numDataIndexLevels_ = 0; bitField0_ = (bitField0_ & ~0x00000080); firstDataBlockOffset_ = 0L; bitField0_ = (bitField0_ & ~0x00000100); lastDataBlockOffset_ = 0L; bitField0_ = (bitField0_ & ~0x00000200); comparatorClassName_ = ""; bitField0_ = (bitField0_ & ~0x00000400); compressionCodec_ = 0; bitField0_ = (bitField0_ & ~0x00000800); encryptionKey_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00001000); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileTrailerProto_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto build() { org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto result = new org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 
to_bitField0_ |= 0x00000001; } result.fileInfoOffset_ = fileInfoOffset_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.loadOnOpenDataOffset_ = loadOnOpenDataOffset_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.uncompressedDataIndexSize_ = uncompressedDataIndexSize_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; } result.totalUncompressedBytes_ = totalUncompressedBytes_; if (((from_bitField0_ & 0x00000010) == 0x00000010)) { to_bitField0_ |= 0x00000010; } result.dataIndexCount_ = dataIndexCount_; if (((from_bitField0_ & 0x00000020) == 0x00000020)) { to_bitField0_ |= 0x00000020; } result.metaIndexCount_ = metaIndexCount_; if (((from_bitField0_ & 0x00000040) == 0x00000040)) { to_bitField0_ |= 0x00000040; } result.entryCount_ = entryCount_; if (((from_bitField0_ & 0x00000080) == 0x00000080)) { to_bitField0_ |= 0x00000080; } result.numDataIndexLevels_ = numDataIndexLevels_; if (((from_bitField0_ & 0x00000100) == 0x00000100)) { to_bitField0_ |= 0x00000100; } result.firstDataBlockOffset_ = firstDataBlockOffset_; if (((from_bitField0_ & 0x00000200) == 0x00000200)) { to_bitField0_ |= 0x00000200; } result.lastDataBlockOffset_ = lastDataBlockOffset_; if (((from_bitField0_ & 0x00000400) == 0x00000400)) { to_bitField0_ |= 0x00000400; } result.comparatorClassName_ = comparatorClassName_; if (((from_bitField0_ & 0x00000800) == 0x00000800)) { to_bitField0_ |= 0x00000800; } result.compressionCodec_ = compressionCodec_; if (((from_bitField0_ & 0x00001000) == 0x00001000)) { to_bitField0_ |= 0x00001000; } result.encryptionKey_ = encryptionKey_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder 
// NOTE(review): protoc-generated code (see the "DO NOT EDIT" header at the top of this file).
// The code below is left byte-for-byte intact; only comments have been added between the
// original physical lines. To change behavior, edit HFile.proto and regenerate.
// This chunk continues mid-declaration inside the generated FileTrailerProto.Builder:
// generic Builder overrides (clearField/clearOneof/setRepeatedField/addRepeatedField delegate
// to GeneratedMessageV3.Builder), then mergeFrom(Message) which dispatches to the typed
// mergeFrom(FileTrailerProto), copying each field whose presence bit (hasX()) is set on `other`.
clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto.getDefaultInstance()) return this; if (other.hasFileInfoOffset()) { setFileInfoOffset(other.getFileInfoOffset()); } if (other.hasLoadOnOpenDataOffset()) { setLoadOnOpenDataOffset(other.getLoadOnOpenDataOffset()); } if (other.hasUncompressedDataIndexSize()) { setUncompressedDataIndexSize(other.getUncompressedDataIndexSize()); } if (other.hasTotalUncompressedBytes()) { setTotalUncompressedBytes(other.getTotalUncompressedBytes()); } if (other.hasDataIndexCount()) { setDataIndexCount(other.getDataIndexCount()); } if (other.hasMetaIndexCount()) { setMetaIndexCount(other.getMetaIndexCount()); } if (other.hasEntryCount()) { setEntryCount(other.getEntryCount()); } if (other.hasNumDataIndexLevels()) { 
setNumDataIndexLevels(other.getNumDataIndexLevels()); } if (other.hasFirstDataBlockOffset()) { setFirstDataBlockOffset(other.getFirstDataBlockOffset()); } if (other.hasLastDataBlockOffset()) { setLastDataBlockOffset(other.getLastDataBlockOffset()); } if (other.hasComparatorClassName()) { bitField0_ |= 0x00000400; comparatorClassName_ = other.comparatorClassName_; onChanged(); } if (other.hasCompressionCodec()) { setCompressionCodec(other.getCompressionCodec()); } if (other.hasEncryptionKey()) { setEncryptionKey(other.getEncryptionKey()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private long fileInfoOffset_ ; /** * <code>optional uint64 file_info_offset = 1;</code> */ public boolean hasFileInfoOffset() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional uint64 file_info_offset = 1;</code> */ public long getFileInfoOffset() { return fileInfoOffset_; } /** * <code>optional uint64 file_info_offset = 1;</code> */ public Builder setFileInfoOffset(long value) { bitField0_ |= 0x00000001; fileInfoOffset_ = value; onChanged(); return this; } /** * <code>optional uint64 file_info_offset = 1;</code> */ public Builder 
// Builder field accessors for the 13 optional FileTrailerProto fields follow. Each field
// owns one bit of bitField0_ as its proto2 presence flag (file_info_offset = 0x00000001
// through encryption_key = 0x00001000): setX() sets the bit, stores the value, and calls
// onChanged(); clearX() clears the bit and restores the field's default value.
clearFileInfoOffset() { bitField0_ = (bitField0_ & ~0x00000001); fileInfoOffset_ = 0L; onChanged(); return this; } private long loadOnOpenDataOffset_ ; /** * <code>optional uint64 load_on_open_data_offset = 2;</code> */ public boolean hasLoadOnOpenDataOffset() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional uint64 load_on_open_data_offset = 2;</code> */ public long getLoadOnOpenDataOffset() { return loadOnOpenDataOffset_; } /** * <code>optional uint64 load_on_open_data_offset = 2;</code> */ public Builder setLoadOnOpenDataOffset(long value) { bitField0_ |= 0x00000002; loadOnOpenDataOffset_ = value; onChanged(); return this; } /** * <code>optional uint64 load_on_open_data_offset = 2;</code> */ public Builder clearLoadOnOpenDataOffset() { bitField0_ = (bitField0_ & ~0x00000002); loadOnOpenDataOffset_ = 0L; onChanged(); return this; } private long uncompressedDataIndexSize_ ; /** * <code>optional uint64 uncompressed_data_index_size = 3;</code> */ public boolean hasUncompressedDataIndexSize() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional uint64 uncompressed_data_index_size = 3;</code> */ public long getUncompressedDataIndexSize() { return uncompressedDataIndexSize_; } /** * <code>optional uint64 uncompressed_data_index_size = 3;</code> */ public Builder setUncompressedDataIndexSize(long value) { bitField0_ |= 0x00000004; uncompressedDataIndexSize_ = value; onChanged(); return this; } /** * <code>optional uint64 uncompressed_data_index_size = 3;</code> */ public Builder clearUncompressedDataIndexSize() { bitField0_ = (bitField0_ & ~0x00000004); uncompressedDataIndexSize_ = 0L; onChanged(); return this; } private long totalUncompressedBytes_ ; /** * <code>optional uint64 total_uncompressed_bytes = 4;</code> */ public boolean hasTotalUncompressedBytes() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional uint64 total_uncompressed_bytes = 4;</code> */ public long getTotalUncompressedBytes() { 
return totalUncompressedBytes_; } /** * <code>optional uint64 total_uncompressed_bytes = 4;</code> */ public Builder setTotalUncompressedBytes(long value) { bitField0_ |= 0x00000008; totalUncompressedBytes_ = value; onChanged(); return this; } /** * <code>optional uint64 total_uncompressed_bytes = 4;</code> */ public Builder clearTotalUncompressedBytes() { bitField0_ = (bitField0_ & ~0x00000008); totalUncompressedBytes_ = 0L; onChanged(); return this; } private int dataIndexCount_ ; /** * <code>optional uint32 data_index_count = 5;</code> */ public boolean hasDataIndexCount() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>optional uint32 data_index_count = 5;</code> */ public int getDataIndexCount() { return dataIndexCount_; } /** * <code>optional uint32 data_index_count = 5;</code> */ public Builder setDataIndexCount(int value) { bitField0_ |= 0x00000010; dataIndexCount_ = value; onChanged(); return this; } /** * <code>optional uint32 data_index_count = 5;</code> */ public Builder clearDataIndexCount() { bitField0_ = (bitField0_ & ~0x00000010); dataIndexCount_ = 0; onChanged(); return this; } private int metaIndexCount_ ; /** * <code>optional uint32 meta_index_count = 6;</code> */ public boolean hasMetaIndexCount() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <code>optional uint32 meta_index_count = 6;</code> */ public int getMetaIndexCount() { return metaIndexCount_; } /** * <code>optional uint32 meta_index_count = 6;</code> */ public Builder setMetaIndexCount(int value) { bitField0_ |= 0x00000020; metaIndexCount_ = value; onChanged(); return this; } /** * <code>optional uint32 meta_index_count = 6;</code> */ public Builder clearMetaIndexCount() { bitField0_ = (bitField0_ & ~0x00000020); metaIndexCount_ = 0; onChanged(); return this; } private long entryCount_ ; /** * <code>optional uint64 entry_count = 7;</code> */ public boolean hasEntryCount() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** * <code>optional uint64 
entry_count = 7;</code> */ public long getEntryCount() { return entryCount_; } /** * <code>optional uint64 entry_count = 7;</code> */ public Builder setEntryCount(long value) { bitField0_ |= 0x00000040; entryCount_ = value; onChanged(); return this; } /** * <code>optional uint64 entry_count = 7;</code> */ public Builder clearEntryCount() { bitField0_ = (bitField0_ & ~0x00000040); entryCount_ = 0L; onChanged(); return this; } private int numDataIndexLevels_ ; /** * <code>optional uint32 num_data_index_levels = 8;</code> */ public boolean hasNumDataIndexLevels() { return ((bitField0_ & 0x00000080) == 0x00000080); } /** * <code>optional uint32 num_data_index_levels = 8;</code> */ public int getNumDataIndexLevels() { return numDataIndexLevels_; } /** * <code>optional uint32 num_data_index_levels = 8;</code> */ public Builder setNumDataIndexLevels(int value) { bitField0_ |= 0x00000080; numDataIndexLevels_ = value; onChanged(); return this; } /** * <code>optional uint32 num_data_index_levels = 8;</code> */ public Builder clearNumDataIndexLevels() { bitField0_ = (bitField0_ & ~0x00000080); numDataIndexLevels_ = 0; onChanged(); return this; } private long firstDataBlockOffset_ ; /** * <code>optional uint64 first_data_block_offset = 9;</code> */ public boolean hasFirstDataBlockOffset() { return ((bitField0_ & 0x00000100) == 0x00000100); } /** * <code>optional uint64 first_data_block_offset = 9;</code> */ public long getFirstDataBlockOffset() { return firstDataBlockOffset_; } /** * <code>optional uint64 first_data_block_offset = 9;</code> */ public Builder setFirstDataBlockOffset(long value) { bitField0_ |= 0x00000100; firstDataBlockOffset_ = value; onChanged(); return this; } /** * <code>optional uint64 first_data_block_offset = 9;</code> */ public Builder clearFirstDataBlockOffset() { bitField0_ = (bitField0_ & ~0x00000100); firstDataBlockOffset_ = 0L; onChanged(); return this; } private long lastDataBlockOffset_ ; /** * <code>optional uint64 last_data_block_offset = 
10;</code> */ public boolean hasLastDataBlockOffset() { return ((bitField0_ & 0x00000200) == 0x00000200); } /** * <code>optional uint64 last_data_block_offset = 10;</code> */ public long getLastDataBlockOffset() { return lastDataBlockOffset_; } /** * <code>optional uint64 last_data_block_offset = 10;</code> */ public Builder setLastDataBlockOffset(long value) { bitField0_ |= 0x00000200; lastDataBlockOffset_ = value; onChanged(); return this; } /** * <code>optional uint64 last_data_block_offset = 10;</code> */ public Builder clearLastDataBlockOffset() { bitField0_ = (bitField0_ & ~0x00000200); lastDataBlockOffset_ = 0L; onChanged(); return this; } private java.lang.Object comparatorClassName_ = ""; /** * <code>optional string comparator_class_name = 11;</code> */ public boolean hasComparatorClassName() { return ((bitField0_ & 0x00000400) == 0x00000400); } /** * <code>optional string comparator_class_name = 11;</code> */ public java.lang.String getComparatorClassName() { java.lang.Object ref = comparatorClassName_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { comparatorClassName_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>optional string comparator_class_name = 11;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getComparatorClassNameBytes() { java.lang.Object ref = comparatorClassName_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); comparatorClassName_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <code>optional string comparator_class_name = 11;</code> */ public Builder setComparatorClassName( 
java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000400; comparatorClassName_ = value; onChanged(); return this; } /** * <code>optional string comparator_class_name = 11;</code> */ public Builder clearComparatorClassName() { bitField0_ = (bitField0_ & ~0x00000400); comparatorClassName_ = getDefaultInstance().getComparatorClassName(); onChanged(); return this; } /** * <code>optional string comparator_class_name = 11;</code> */ public Builder setComparatorClassNameBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000400; comparatorClassName_ = value; onChanged(); return this; } private int compressionCodec_ ; /** * <code>optional uint32 compression_codec = 12;</code> */ public boolean hasCompressionCodec() { return ((bitField0_ & 0x00000800) == 0x00000800); } /** * <code>optional uint32 compression_codec = 12;</code> */ public int getCompressionCodec() { return compressionCodec_; } /** * <code>optional uint32 compression_codec = 12;</code> */ public Builder setCompressionCodec(int value) { bitField0_ |= 0x00000800; compressionCodec_ = value; onChanged(); return this; } /** * <code>optional uint32 compression_codec = 12;</code> */ public Builder clearCompressionCodec() { bitField0_ = (bitField0_ & ~0x00000800); compressionCodec_ = 0; onChanged(); return this; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString encryptionKey_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>optional bytes encryption_key = 13;</code> */ public boolean hasEncryptionKey() { return ((bitField0_ & 0x00001000) == 0x00001000); } /** * <code>optional bytes encryption_key = 13;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getEncryptionKey() { return encryptionKey_; } /** * <code>optional bytes encryption_key = 13;</code> */ public Builder 
setEncryptionKey(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00001000; encryptionKey_ = value; onChanged(); return this; } /** * <code>optional bytes encryption_key = 13;</code> */ public Builder clearEncryptionKey() { bitField0_ = (bitField0_ & ~0x00001000); encryptionKey_ = getDefaultInstance().getEncryptionKey(); onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.FileTrailerProto) } // @@protoc_insertion_point(class_scope:hbase.pb.FileTrailerProto) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FileTrailerProto> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<FileTrailerProto>() { public FileTrailerProto parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new FileTrailerProto(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FileTrailerProto> parser() { 
// End of the generated FileTrailerProto message class: parser()/getParserForType() expose
// the deprecated singleton PARSER, and getDefaultInstanceForType() returns DEFAULT_INSTANCE.
// After the class close, the outer HFileProtos class declares the per-message descriptor /
// FieldAccessorTable statics and a static initializer that rebuilds the FileDescriptor from
// descriptorData — a protoc-serialized FileDescriptorProto embedded as octal-escaped string
// literals. Do not modify those literals by hand; they must match HFile.proto exactly.
return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FileTrailerProto> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_FileInfoProto_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_FileInfoProto_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_FileTrailerProto_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_FileTrailerProto_fieldAccessorTable; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n\013HFile.proto\022\010hbase.pb\032\013HBase.proto\"<\n\r" + "FileInfoProto\022+\n\tmap_entry\030\001 \003(\0132\030.hbase" + ".pb.BytesBytesPair\"\221\003\n\020FileTrailerProto\022" + "\030\n\020file_info_offset\030\001 \001(\004\022 \n\030load_on_ope" + "n_data_offset\030\002 \001(\004\022$\n\034uncompressed_data" + "_index_size\030\003 \001(\004\022 \n\030total_uncompressed_" + "bytes\030\004 \001(\004\022\030\n\020data_index_count\030\005 \001(\r\022\030\n" + "\020meta_index_count\030\006 \001(\r\022\023\n\013entry_count\030\007" + " \001(\004\022\035\n\025num_data_index_levels\030\010 \001(\r\022\037\n\027f" + "irst_data_block_offset\030\t \001(\004\022\036\n\026last_dat", "a_block_offset\030\n \001(\004\022\035\n\025comparator_class" + "_name\030\013 
\001(\t\022\031\n\021compression_codec\030\014 \001(\r\022\026" + "\n\016encryption_key\030\r \001(\014BH\n1org.apache.had" + "oop.hbase.shaded.protobuf.generatedB\013HFi" + "leProtosH\001\210\001\001\240\001\001" }; org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { public org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry assignDescriptors( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor root) { descriptor = root; return null; } }; org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor[] { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor(), }, assigner); internal_static_hbase_pb_FileInfoProto_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_hbase_pb_FileInfoProto_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_FileInfoProto_descriptor, new java.lang.String[] { "MapEntry", }); internal_static_hbase_pb_FileTrailerProto_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_hbase_pb_FileTrailerProto_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_FileTrailerProto_descriptor, new java.lang.String[] { "FileInfoOffset", "LoadOnOpenDataOffset", "UncompressedDataIndexSize", "TotalUncompressedBytes", "DataIndexCount", "MetaIndexCount", "EntryCount", "NumDataIndexLevels", "FirstDataBlockOffset", "LastDataBlockOffset", "ComparatorClassName", "CompressionCodec", "EncryptionKey", }); 
// Touching HBaseProtos.getDescriptor() here forces static initialization of the dependency
// descriptor (HBase.proto) that HFile.proto imports.
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) }