// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: WAL.proto

package org.apache.hadoop.hbase.shaded.protobuf.generated;

public final class WALProtos {
  private WALProtos() {}
  public static void registerAllExtensions(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite registry) {
  }

  public static void registerAllExtensions(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry registry) {
    registerAllExtensions(
        (org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite) registry);
  }
  /**
   * Protobuf enum {@code hbase.pb.ScopeType}
   */
  public enum ScopeType
      implements org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolMessageEnum {
    /**
     * <code>REPLICATION_SCOPE_LOCAL = 0;</code>
     */
    REPLICATION_SCOPE_LOCAL(0),
    /**
     * <code>REPLICATION_SCOPE_GLOBAL = 1;</code>
     */
    REPLICATION_SCOPE_GLOBAL(1),
    /**
     * <code>REPLICATION_SCOPE_SERIAL = 2;</code>
     */
    REPLICATION_SCOPE_SERIAL(2),
    ;

    /**
     * <code>REPLICATION_SCOPE_LOCAL = 0;</code>
     */
    public static final int REPLICATION_SCOPE_LOCAL_VALUE = 0;
    /**
     * <code>REPLICATION_SCOPE_GLOBAL = 1;</code>
     */
    public static final int REPLICATION_SCOPE_GLOBAL_VALUE = 1;
    /**
     * <code>REPLICATION_SCOPE_SERIAL = 2;</code>
     */
    public static final int REPLICATION_SCOPE_SERIAL_VALUE = 2;

    public final int getNumber() {
      return value;
    }

    /**
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static ScopeType valueOf(int value) {
      return forNumber(value);
    }

    public static ScopeType forNumber(int value) {
      switch (value) {
        case 0: return REPLICATION_SCOPE_LOCAL;
        case 1: return REPLICATION_SCOPE_GLOBAL;
        case 2: return REPLICATION_SCOPE_SERIAL;
        default: return null;
      }
    }

    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<ScopeType>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<
        ScopeType> internalValueMap =
          new org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<ScopeType>() {
            public ScopeType findValueByNumber(int number) {
              return ScopeType.forNumber(number);
            }
          };

    public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.getDescriptor().getEnumTypes().get(0);
    }

    private static final ScopeType[] VALUES = values();

    public static ScopeType valueOf(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private ScopeType(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hbase.pb.ScopeType)
  }

  public interface WALHeaderOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hbase.pb.WALHeader)
      org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {

    /**
     * <code>optional bool has_compression = 1;</code>
     */
    boolean hasHasCompression();
    /**
     * <code>optional bool has_compression = 1;</code>
     */
    boolean getHasCompression();

    /**
     * <code>optional bytes
     * encryption_key = 2;</code>
     */
    boolean hasEncryptionKey();
    /**
     * <code>optional bytes encryption_key = 2;</code>
     */
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getEncryptionKey();

    /**
     * <code>optional bool has_tag_compression = 3;</code>
     */
    boolean hasHasTagCompression();
    /**
     * <code>optional bool has_tag_compression = 3;</code>
     */
    boolean getHasTagCompression();

    /**
     * <code>optional string writer_cls_name = 4;</code>
     */
    boolean hasWriterClsName();
    /**
     * <code>optional string writer_cls_name = 4;</code>
     */
    java.lang.String getWriterClsName();
    /**
     * <code>optional string writer_cls_name = 4;</code>
     */
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
        getWriterClsNameBytes();

    /**
     * <code>optional string cell_codec_cls_name = 5;</code>
     */
    boolean hasCellCodecClsName();
    /**
     * <code>optional string cell_codec_cls_name = 5;</code>
     */
    java.lang.String getCellCodecClsName();
    /**
     * <code>optional string cell_codec_cls_name = 5;</code>
     */
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
        getCellCodecClsNameBytes();
  }
  /**
   * Protobuf type {@code hbase.pb.WALHeader}
   */
  public static final class WALHeader extends
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hbase.pb.WALHeader)
      WALHeaderOrBuilder {
    // Use WALHeader.newBuilder() to construct.
    private WALHeader(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private WALHeader() {
      hasCompression_ = false;
      encryptionKey_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
      hasTagCompression_ = false;
      writerClsName_ = "";
      cellCodecClsName_ = "";
    }

    @java.lang.Override
    public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
    getUnknownFields() {
      return this.unknownFields;
    }
    private WALHeader(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      this();
      int mutable_bitField0_ = 0;
      org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              bitField0_ |= 0x00000001;
              hasCompression_ = input.readBool();
              break;
            }
            case 18: {
              bitField0_ |= 0x00000002;
              encryptionKey_ = input.readBytes();
              break;
            }
            case 24: {
              bitField0_ |= 0x00000004;
              hasTagCompression_ = input.readBool();
              break;
            }
            case 34: {
              org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes();
              bitField0_ |= 0x00000008;
              writerClsName_ = bs;
              break;
            }
            case 42: {
              org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes();
              bitField0_ |= 0x00000010;
              cellCodecClsName_ = bs;
              break;
            }
          }
        }
      } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
            e).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALHeader_descriptor;
    }

    protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALHeader_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader.class, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader.Builder.class);
    }

    private int bitField0_;
    public static final int HAS_COMPRESSION_FIELD_NUMBER = 1;
    private boolean hasCompression_;
    /**
     * <code>optional bool has_compression = 1;</code>
     */
    public boolean hasHasCompression() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional bool has_compression = 1;</code>
     */
    public boolean getHasCompression() {
      return hasCompression_;
    }

    public static final int ENCRYPTION_KEY_FIELD_NUMBER = 2;
    private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString encryptionKey_;
    /**
     * <code>optional bytes encryption_key = 2;</code>
     */
    public boolean hasEncryptionKey() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional bytes encryption_key = 2;</code>
     */
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getEncryptionKey() {
      return encryptionKey_;
    }

    public static final int HAS_TAG_COMPRESSION_FIELD_NUMBER = 3;
    private boolean hasTagCompression_;
    /**
     * <code>optional bool has_tag_compression = 3;</code>
     */
    public boolean hasHasTagCompression() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional bool has_tag_compression = 3;</code>
     */
    public boolean getHasTagCompression() {
      return hasTagCompression_;
    }

    public static final int WRITER_CLS_NAME_FIELD_NUMBER = 4;
    private volatile java.lang.Object writerClsName_;
    /**
     * <code>optional string writer_cls_name = 4;</code>
     */
    public boolean hasWriterClsName() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional string writer_cls_name = 4;</code>
     */
    public java.lang.String getWriterClsName() {
      java.lang.Object ref = writerClsName_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
            (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          writerClsName_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string writer_cls_name = 4;</code>
     */
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
        getWriterClsNameBytes() {
      java.lang.Object ref = writerClsName_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
            org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        writerClsName_ = b;
        return b;
      } else {
        return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
      }
    }

    public static final int CELL_CODEC_CLS_NAME_FIELD_NUMBER = 5;
    private volatile java.lang.Object cellCodecClsName_;
    /**
     * <code>optional string cell_codec_cls_name = 5;</code>
     */
    public boolean hasCellCodecClsName() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    /**
     * <code>optional string cell_codec_cls_name = 5;</code>
     */
    public java.lang.String getCellCodecClsName() {
      java.lang.Object ref = cellCodecClsName_;
      if (ref instanceof
          java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
            (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          cellCodecClsName_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string cell_codec_cls_name = 5;</code>
     */
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
        getCellCodecClsNameBytes() {
      java.lang.Object ref = cellCodecClsName_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
            org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        cellCodecClsName_ = b;
        return b;
      } else {
        return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
      }
    }

    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBool(1, hasCompression_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, encryptionKey_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBool(3, hasTagCompression_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 4, writerClsName_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 5, cellCodecClsName_);
      }
      unknownFields.writeTo(output);
    }

    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
          .computeBoolSize(1, hasCompression_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, encryptionKey_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
          .computeBoolSize(3, hasTagCompression_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(4, writerClsName_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(5, cellCodecClsName_);
      }
      size += unknownFields.getSerializedSize();
      memoizedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader other = (org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader) obj;

      boolean result = true;
      result = result && (hasHasCompression() == other.hasHasCompression());
      if (hasHasCompression()) {
        result = result && (getHasCompression()
            == other.getHasCompression());
      }
      result = result && (hasEncryptionKey() == other.hasEncryptionKey());
      if
          (hasEncryptionKey()) {
        result = result && getEncryptionKey()
            .equals(other.getEncryptionKey());
      }
      result = result && (hasHasTagCompression() == other.hasHasTagCompression());
      if (hasHasTagCompression()) {
        result = result && (getHasTagCompression()
            == other.getHasTagCompression());
      }
      result = result && (hasWriterClsName() == other.hasWriterClsName());
      if (hasWriterClsName()) {
        result = result && getWriterClsName()
            .equals(other.getWriterClsName());
      }
      result = result && (hasCellCodecClsName() == other.hasCellCodecClsName());
      if (hasCellCodecClsName()) {
        result = result && getCellCodecClsName()
            .equals(other.getCellCodecClsName());
      }
      result = result && unknownFields.equals(other.unknownFields);
      return result;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasHasCompression()) {
        hash = (37 * hash) + HAS_COMPRESSION_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean(
            getHasCompression());
      }
      if (hasEncryptionKey()) {
        hash = (37 * hash) + ENCRYPTION_KEY_FIELD_NUMBER;
        hash = (53 * hash) + getEncryptionKey().hashCode();
      }
      if (hasHasTagCompression()) {
        hash = (37 * hash) + HAS_TAG_COMPRESSION_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean(
            getHasTagCompression());
      }
      if (hasWriterClsName()) {
        hash = (37 * hash) + WRITER_CLS_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getWriterClsName().hashCode();
      }
      if (hasCellCodecClsName()) {
        hash = (37 * hash) + CELL_CODEC_CLS_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getCellCodecClsName().hashCode();
      }
      hash = (29 * hash) + unknownFields.hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader parseFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader parseFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader parseFrom(byte[] data)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader parseFrom(
        byte[] data,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return
          org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader parseFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader parseFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE ?
          new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.WALHeader}
     */
    public static final class Builder extends
        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hbase.pb.WALHeader)
        org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeaderOrBuilder {
      public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALHeader_descriptor;
      }

      protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALHeader_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader.class, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
        }
      }
      public Builder clear() {
        super.clear();
        hasCompression_ = false;
        bitField0_ = (bitField0_ & ~0x00000001);
        encryptionKey_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000002);
        hasTagCompression_ = false;
        bitField0_ = (bitField0_ & ~0x00000004);
        writerClsName_ = "";
        bitField0_ = (bitField0_ & ~0x00000008);
        cellCodecClsName_ = "";
        bitField0_ = (bitField0_ & ~0x00000010);
        return this;
      }

      public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALHeader_descriptor;
      }

      public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader build() {
        org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader buildPartial() {
        org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader result = new org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.hasCompression_ = hasCompression_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.encryptionKey_ = encryptionKey_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
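        // buildPartial() copies each builder field into the new message
        // unconditionally and translates the builder's staging bits
        // (from_bitField0_) into the message's presence bits (to_bitField0_);
        // the hazzers on the built message consult only those presence bits,
        // so an unset field's copied default value is never observable as set.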
        result.hasTagCompression_ = hasTagCompression_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.writerClsName_ = writerClsName_;
        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
          to_bitField0_ |= 0x00000010;
        }
        result.cellCodecClsName_ = cellCodecClsName_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder clone() {
        return (Builder) super.clone();
      }
      public Builder setField(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
          Object value) {
        return (Builder) super.setField(field, value);
      }
      public Builder clearField(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
        return (Builder) super.clearField(field);
      }
      public Builder clearOneof(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
        return (Builder) super.clearOneof(oneof);
      }
      public Builder setRepeatedField(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
          int index, Object value) {
        return (Builder) super.setRepeatedField(field, index, value);
      }
      public Builder addRepeatedField(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
          Object value) {
        return (Builder) super.addRepeatedField(field, value);
      }
      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader) {
          return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader other) {
        if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader.getDefaultInstance()) return this;
        if (other.hasHasCompression()) {
          setHasCompression(other.getHasCompression());
        }
        if (other.hasEncryptionKey()) {
          setEncryptionKey(other.getEncryptionKey());
        }
        if (other.hasHasTagCompression()) {
          setHasTagCompression(other.getHasTagCompression());
        }
        if (other.hasWriterClsName()) {
          bitField0_ |= 0x00000008;
          writerClsName_ = other.writerClsName_;
          onChanged();
        }
        if (other.hasCellCodecClsName()) {
          bitField0_ |= 0x00000010;
          cellCodecClsName_ = other.cellCodecClsName_;
          onChanged();
        }
        this.mergeUnknownFields(other.unknownFields);
        onChanged();
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
          org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader) e.getUnfinishedMessage();
          throw e.unwrapIOException();
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      private boolean hasCompression_ ;
      /**
       * <code>optional bool has_compression = 1;</code>
       */
      public boolean hasHasCompression() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional bool has_compression = 1;</code>
       */
      public boolean getHasCompression() {
        return hasCompression_;
      }
      /**
       * <code>optional bool
       * has_compression = 1;</code>
       */
      public Builder setHasCompression(boolean value) {
        bitField0_ |= 0x00000001;
        hasCompression_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool has_compression = 1;</code>
       */
      public Builder clearHasCompression() {
        bitField0_ = (bitField0_ & ~0x00000001);
        hasCompression_ = false;
        onChanged();
        return this;
      }

      private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString encryptionKey_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>optional bytes encryption_key = 2;</code>
       */
      public boolean hasEncryptionKey() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional bytes encryption_key = 2;</code>
       */
      public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getEncryptionKey() {
        return encryptionKey_;
      }
      /**
       * <code>optional bytes encryption_key = 2;</code>
       */
      public Builder setEncryptionKey(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        encryptionKey_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bytes encryption_key = 2;</code>
       */
      public Builder clearEncryptionKey() {
        bitField0_ = (bitField0_ & ~0x00000002);
        encryptionKey_ = getDefaultInstance().getEncryptionKey();
        onChanged();
        return this;
      }

      private boolean hasTagCompression_ ;
      /**
       * <code>optional bool has_tag_compression = 3;</code>
       */
      public boolean hasHasTagCompression() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional bool has_tag_compression = 3;</code>
       */
      public boolean getHasTagCompression() {
        return hasTagCompression_;
      }
      /**
       * <code>optional bool has_tag_compression = 3;</code>
       */
      public Builder setHasTagCompression(boolean value) {
        bitField0_ |= 0x00000004;
        hasTagCompression_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool has_tag_compression = 3;</code>
       */
      public Builder clearHasTagCompression() {
        bitField0_ = (bitField0_ & ~0x00000004);
        hasTagCompression_ = false;
        onChanged();
        return this;
      }

      private java.lang.Object writerClsName_ = "";
      /**
       * <code>optional string writer_cls_name = 4;</code>
       */
      public boolean hasWriterClsName() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional string writer_cls_name = 4;</code>
       */
      public java.lang.String getWriterClsName() {
        java.lang.Object ref = writerClsName_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
              (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            writerClsName_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string writer_cls_name = 4;</code>
       */
      public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
          getWriterClsNameBytes() {
        java.lang.Object ref = writerClsName_;
        if (ref instanceof String) {
          org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
              org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          writerClsName_ = b;
          return b;
        } else {
          return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string writer_cls_name = 4;</code>
       */
      public Builder setWriterClsName(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000008;
        writerClsName_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional string writer_cls_name = 4;</code>
       */
      public Builder
          clearWriterClsName() {
        bitField0_ = (bitField0_ & ~0x00000008);
        writerClsName_ = getDefaultInstance().getWriterClsName();
        onChanged();
        return this;
      }
      /**
       * <code>optional string writer_cls_name = 4;</code>
       */
      public Builder setWriterClsNameBytes(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000008;
        writerClsName_ = value;
        onChanged();
        return this;
      }

      private java.lang.Object cellCodecClsName_ = "";
      /**
       * <code>optional string cell_codec_cls_name = 5;</code>
       */
      public boolean hasCellCodecClsName() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      /**
       * <code>optional string cell_codec_cls_name = 5;</code>
       */
      public java.lang.String getCellCodecClsName() {
        java.lang.Object ref = cellCodecClsName_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
              (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            cellCodecClsName_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string cell_codec_cls_name = 5;</code>
       */
      public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
          getCellCodecClsNameBytes() {
        java.lang.Object ref = cellCodecClsName_;
        if (ref instanceof String) {
          org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
              org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          cellCodecClsName_ = b;
          return b;
        } else {
          return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string cell_codec_cls_name = 5;</code>
       */
      public Builder setCellCodecClsName(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000010;
        cellCodecClsName_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional string cell_codec_cls_name = 5;</code>
       */
      public Builder clearCellCodecClsName() {
        bitField0_ = (bitField0_ & ~0x00000010);
        cellCodecClsName_ = getDefaultInstance().getCellCodecClsName();
        onChanged();
        return this;
      }
      /**
       * <code>optional string cell_codec_cls_name = 5;</code>
       */
      public Builder setCellCodecClsNameBytes(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000010;
        cellCodecClsName_ = value;
        onChanged();
        return this;
      }
      public final Builder setUnknownFields(
          final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      public final Builder mergeUnknownFields(
          final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.WALHeader)
    }

    // @@protoc_insertion_point(class_scope:hbase.pb.WALHeader)
    private static final org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader();
    }

    public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<WALHeader>
        PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<WALHeader>() {
      public WALHeader parsePartialFrom(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
          org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
        return new WALHeader(input, extensionRegistry);
      }
    };

    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<WALHeader> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<WALHeader> getParserForType() {
      return PARSER;
    }

    public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface WALKeyOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hbase.pb.WALKey)
      org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {

    /**
     * <code>required bytes encoded_region_name = 1;</code>
     */
    boolean hasEncodedRegionName();
    /**
     * <code>required bytes encoded_region_name = 1;</code>
     */
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getEncodedRegionName();

    /**
     * <code>required bytes table_name = 2;</code>
     */
    boolean hasTableName();
    /**
     * <code>required bytes table_name = 2;</code>
     */
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getTableName();

    /**
     * <code>required uint64 log_sequence_number = 3;</code>
     */
    boolean hasLogSequenceNumber();
    /**
     * <code>required uint64 log_sequence_number = 3;</code>
     */
    long getLogSequenceNumber();

    /**
     * <code>required uint64 write_time = 4;</code>
     */
    boolean hasWriteTime();
    /**
     * <code>required uint64 write_time = 4;</code>
     */
    long getWriteTime();

    /**
     * <pre>
     *This parameter is deprecated in favor of clusters which
     *contains the list of clusters that have consumed the change.
     *It is retained so that the log created by earlier releases (0.94)
     *can be read by the newer releases.
     * </pre>
     *
     * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code>
     */
    @java.lang.Deprecated boolean hasClusterId();
    /**
     * <pre>
     *This parameter is deprecated in favor of clusters which
     *contains the list of clusters that have consumed the change.
     *It is retained so that the log created by earlier releases (0.94)
     *can be read by the newer releases.
     * </pre>
     *
     * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code>
     */
    @java.lang.Deprecated org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID getClusterId();
    /**
     * <pre>
     *This parameter is deprecated in favor of clusters which
     *contains the list of clusters that have consumed the change.
     *It is retained so that the log created by earlier releases (0.94)
     *can be read by the newer releases.
     * </pre>
     *
     * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code>
     */
    @java.lang.Deprecated org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdOrBuilder();

    /**
     * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
     */
    java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope>
        getScopesList();
    /**
     * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
     */
    org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope getScopes(int index);
    /**
     * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
     */
    int getScopesCount();
    /**
     * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
     */
    java.util.List<?
        extends org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScopeOrBuilder>
        getScopesOrBuilderList();
    /**
     * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
     */
    org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScopeOrBuilder getScopesOrBuilder(
        int index);

    /**
     * <code>optional uint32 following_kv_count = 7;</code>
     */
    boolean hasFollowingKvCount();
    /**
     * <code>optional uint32 following_kv_count = 7;</code>
     */
    int getFollowingKvCount();

    /**
     * <pre>
     *This field contains the list of clusters that have
     *consumed the change
     * </pre>
     *
     * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
     */
    java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID>
        getClusterIdsList();
    /**
     * <pre>
     *This field contains the list of clusters that have
     *consumed the change
     * </pre>
     *
     * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
     */
    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID getClusterIds(int index);
    /**
     * <pre>
     *This field contains the list of clusters that have
     *consumed the change
     * </pre>
     *
     * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
     */
    int getClusterIdsCount();
    /**
     * <pre>
     *This field contains the list of clusters that have
     *consumed the change
     * </pre>
     *
     * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
     */
    java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder>
        getClusterIdsOrBuilderList();
    /**
     * <pre>
     *This field contains the list of clusters that have
     *consumed the change
     * </pre>
     *
     * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
     */
    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdsOrBuilder(
        int index);

    /**
     * <code>optional uint64 nonceGroup = 9;</code>
     */
    boolean hasNonceGroup();
    /**
     * <code>optional uint64 nonceGroup = 9;</code>
     */
    long getNonceGroup();

    /**
     * <code>optional uint64 nonce = 10;</code>
     */
    boolean hasNonce();
    /**
     * <code>optional uint64 nonce = 10;</code>
     */
    long getNonce();

    /**
     * <code>optional uint64 orig_sequence_number = 11;</code>
     */
    boolean hasOrigSequenceNumber();
    /**
     * <code>optional uint64 orig_sequence_number = 11;</code>
     */
    long getOrigSequenceNumber();
  }
  /**
   * <pre>
   * Protocol buffer version of WALKey; see WALKey comment, not really a key but WALEdit header
   * for some KVs
   * </pre>
   *
   * Protobuf type {@code hbase.pb.WALKey}
   */
  public static final class WALKey extends
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hbase.pb.WALKey)
      WALKeyOrBuilder {
    // Use WALKey.newBuilder() to construct.
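    //
    // A minimal, illustrative usage sketch (not part of the generated API
    // surface; the literal values below are hypothetical, and the setters
    // are the standard protoc-generated Builder methods for the fields
    // declared in WALKeyOrBuilder). The four `required` fields must be set:
    // isInitialized() checks them, and build() rejects a message that is
    // missing any of them.
    //
    //   WALProtos.WALKey key = WALProtos.WALKey.newBuilder()
    //       .setEncodedRegionName(
    //           org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
    //               .copyFromUtf8("exampleEncodedRegionName"))
    //       .setTableName(
    //           org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
    //               .copyFromUtf8("exampleTable"))
    //       .setLogSequenceNumber(1L)
    //       .setWriteTime(System.currentTimeMillis())
    //       .build();
    //   byte[] bytes = key.toByteArray();
    //   WALProtos.WALKey roundTripped = WALProtos.WALKey.parseFrom(bytes);
    //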
    private WALKey(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private WALKey() {
      encodedRegionName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
      tableName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
      logSequenceNumber_ = 0L;
      writeTime_ = 0L;
      scopes_ = java.util.Collections.emptyList();
      followingKvCount_ = 0;
      clusterIds_ = java.util.Collections.emptyList();
      nonceGroup_ = 0L;
      nonce_ = 0L;
      origSequenceNumber_ = 0L;
    }

    @java.lang.Override
    public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
    getUnknownFields() {
      return this.unknownFields;
    }
    private WALKey(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      this();
      int mutable_bitField0_ = 0;
      org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              encodedRegionName_ = input.readBytes();
              break;
            }
            case 18: {
              bitField0_ |= 0x00000002;
              tableName_ = input.readBytes();
              break;
            }
            case 24: {
              bitField0_ |= 0x00000004;
              logSequenceNumber_ = input.readUInt64();
              break;
            }
            case 32: {
              bitField0_ |= 0x00000008;
              writeTime_ = input.readUInt64();
              break;
            }
            case 42: {
              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder subBuilder = null;
              if (((bitField0_ & 0x00000010) == 0x00000010)) {
                subBuilder = clusterId_.toBuilder();
              }
              clusterId_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(clusterId_);
                clusterId_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000010;
              break;
            }
            case 50: {
              if (!((mutable_bitField0_ & 0x00000020) == 0x00000020)) {
                scopes_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope>();
                mutable_bitField0_ |= 0x00000020;
              }
              scopes_.add(
                  input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope.PARSER, extensionRegistry));
              break;
            }
            case 56: {
              bitField0_ |= 0x00000020;
              followingKvCount_ = input.readUInt32();
              break;
            }
            case 66: {
              if (!((mutable_bitField0_ & 0x00000080) == 0x00000080)) {
                clusterIds_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID>();
                mutable_bitField0_ |= 0x00000080;
              }
              clusterIds_.add(
                  input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.PARSER, extensionRegistry));
              break;
            }
            case 72: {
              bitField0_ |= 0x00000040;
              nonceGroup_ = input.readUInt64();
              break;
            }
            case 80: {
              bitField0_ |= 0x00000080;
              nonce_ = input.readUInt64();
              break;
            }
            case 88: {
              bitField0_ |= 0x00000100;
              origSequenceNumber_ = input.readUInt64();
              break;
            }
          }
        }
      } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
            e).setUnfinishedMessage(this);
      } finally {
        if (((mutable_bitField0_ &
            0x00000020) == 0x00000020)) {
          scopes_ = java.util.Collections.unmodifiableList(scopes_);
        }
        if (((mutable_bitField0_ & 0x00000080) == 0x00000080)) {
          clusterIds_ = java.util.Collections.unmodifiableList(clusterIds_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALKey_descriptor;
    }

    protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALKey_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey.class, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey.Builder.class);
    }

    private int bitField0_;
    public static final int ENCODED_REGION_NAME_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString encodedRegionName_;
    /**
     * <code>required bytes encoded_region_name = 1;</code>
     */
    public boolean hasEncodedRegionName() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required bytes encoded_region_name = 1;</code>
     */
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getEncodedRegionName() {
      return encodedRegionName_;
    }

    public static final int TABLE_NAME_FIELD_NUMBER = 2;
    private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString tableName_;
    /**
     * <code>required bytes table_name = 2;</code>
     */
    public boolean hasTableName() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>required bytes table_name = 2;</code>
     */
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getTableName() {
      return tableName_;
    }

    public static final int LOG_SEQUENCE_NUMBER_FIELD_NUMBER = 3;
    private long logSequenceNumber_;
    /**
     * <code>required uint64 log_sequence_number = 3;</code>
     */
    public boolean hasLogSequenceNumber() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>required uint64 log_sequence_number = 3;</code>
     */
    public long getLogSequenceNumber() {
      return logSequenceNumber_;
    }

    public static final int WRITE_TIME_FIELD_NUMBER = 4;
    private long writeTime_;
    /**
     * <code>required uint64 write_time = 4;</code>
     */
    public boolean hasWriteTime() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>required uint64 write_time = 4;</code>
     */
    public long getWriteTime() {
      return writeTime_;
    }

    public static final int CLUSTER_ID_FIELD_NUMBER = 5;
    private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID clusterId_;
    /**
     * <pre>
     *This parameter is deprecated in favor of clusters which
     *contains the list of clusters that have consumed the change.
     *It is retained so that the log created by earlier releases (0.94)
     *can be read by the newer releases.
     * </pre>
     *
     * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code>
     */
    @java.lang.Deprecated public boolean hasClusterId() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    /**
     * <pre>
     *This parameter is deprecated in favor of clusters which
     *contains the list of clusters that have consumed the change.
     *It is retained so that the log created by earlier releases (0.94)
     *can be read by the newer releases.
     * </pre>
     *
     * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code>
     */
    @java.lang.Deprecated public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID getClusterId() {
      return clusterId_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.getDefaultInstance() : clusterId_;
    }
    /**
     * <pre>
     *This parameter is deprecated in favor of clusters which
     *contains the list of clusters that have consumed the change.
     *It is retained so that the log created by earlier releases (0.94)
     *can be read by the newer releases.
     * </pre>
     *
     * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code>
     */
    @java.lang.Deprecated public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdOrBuilder() {
      return clusterId_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.getDefaultInstance() : clusterId_;
    }

    public static final int SCOPES_FIELD_NUMBER = 6;
    private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope> scopes_;
    /**
     * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
     */
    public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope> getScopesList() {
      return scopes_;
    }
    /**
     * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
     */
    public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScopeOrBuilder>
        getScopesOrBuilderList() {
      return scopes_;
    }
    /**
     * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
     */
    public int getScopesCount() {
      return scopes_.size();
    }
    /**
     * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
     */
    public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope getScopes(int index) {
      return scopes_.get(index);
    }
    /**
     * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
     */
    public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScopeOrBuilder getScopesOrBuilder(
        int index) {
      return scopes_.get(index);
    }

    public static final int FOLLOWING_KV_COUNT_FIELD_NUMBER = 7;
    private int followingKvCount_;
    /**
     * <code>optional uint32 following_kv_count = 7;</code>
     */
    public boolean hasFollowingKvCount() {
      return ((bitField0_ & 0x00000020) == 0x00000020);
    }
    /**
     * <code>optional uint32 following_kv_count = 7;</code>
     */
    public int getFollowingKvCount() {
      return followingKvCount_;
    }

    public static final int CLUSTER_IDS_FIELD_NUMBER = 8;
    private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID> clusterIds_;
    /**
     * <pre>
     *This field contains the list of clusters that have
     *consumed the change
     * </pre>
     *
     * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
     */
    public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID> getClusterIdsList() {
      return clusterIds_;
    }
    /**
     * <pre>
     *This field contains the list of clusters that have
     *consumed the change
     * </pre>
     *
     * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
     */
    public java.util.List<?
        extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder>
        getClusterIdsOrBuilderList() {
      return clusterIds_;
    }
    /**
     * <pre>
     *This field contains the list of clusters that have
     *consumed the change
     * </pre>
     *
     * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
     */
    public int getClusterIdsCount() {
      return clusterIds_.size();
    }
    /**
     * <pre>
     *This field contains the list of clusters that have
     *consumed the change
     * </pre>
     *
     * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
     */
    public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID getClusterIds(int index) {
      return clusterIds_.get(index);
    }
    /**
     * <pre>
     *This field contains the list of clusters that have
     *consumed the change
     * </pre>
     *
     * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
     */
    public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdsOrBuilder(
        int index) {
      return clusterIds_.get(index);
    }

    public static final int NONCEGROUP_FIELD_NUMBER = 9;
    private long nonceGroup_;
    /**
     * <code>optional uint64 nonceGroup = 9;</code>
     */
    public boolean hasNonceGroup() {
      return ((bitField0_ & 0x00000040) == 0x00000040);
    }
    /**
     * <code>optional uint64 nonceGroup = 9;</code>
     */
    public long getNonceGroup() {
      return nonceGroup_;
    }

    public static final int NONCE_FIELD_NUMBER = 10;
    private long nonce_;
    /**
     * <code>optional uint64 nonce = 10;</code>
     */
    public boolean hasNonce() {
      return ((bitField0_ & 0x00000080) == 0x00000080);
    }
    /**
     * <code>optional uint64 nonce = 10;</code>
     */
    public long getNonce() {
      return nonce_;
    }

    public static final int ORIG_SEQUENCE_NUMBER_FIELD_NUMBER = 11;
    private long origSequenceNumber_;
    /**
     * <code>optional uint64 orig_sequence_number = 11;</code>
     */
    public boolean hasOrigSequenceNumber() {
      return ((bitField0_ & 0x00000100) == 0x00000100);
    }
    /**
     * <code>optional uint64 orig_sequence_number = 11;</code>
     */
    public long getOrigSequenceNumber() {
      return origSequenceNumber_;
    }

    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (!hasEncodedRegionName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasTableName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasLogSequenceNumber()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasWriteTime()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (hasClusterId()) {
        if (!getClusterId().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      for (int i = 0; i < getScopesCount(); i++) {
        if (!getScopes(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      for (int i = 0; i < getClusterIdsCount(); i++) {
        if (!getClusterIds(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, encodedRegionName_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, tableName_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeUInt64(3, logSequenceNumber_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeUInt64(4, writeTime_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeMessage(5, getClusterId());
      }
      for (int i = 0; i < scopes_.size(); i++) {
        output.writeMessage(6, scopes_.get(i));
      }
      if (((bitField0_ & 0x00000020) == 0x00000020))
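      // writeTo emits fields in ascending field-number order: optional
      // scalars (like following_kv_count below) are guarded by their
      // presence bits, while repeated fields (scopes, cluster_ids) are
      // written unconditionally, one message per element.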
      {
        output.writeUInt32(7, followingKvCount_);
      }
      for (int i = 0; i < clusterIds_.size(); i++) {
        output.writeMessage(8, clusterIds_.get(i));
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        output.writeUInt64(9, nonceGroup_);
      }
      if (((bitField0_ & 0x00000080) == 0x00000080)) {
        output.writeUInt64(10, nonce_);
      }
      if (((bitField0_ & 0x00000100) == 0x00000100)) {
        output.writeUInt64(11, origSequenceNumber_);
      }
      unknownFields.writeTo(output);
    }

    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, encodedRegionName_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, tableName_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
          .computeUInt64Size(3, logSequenceNumber_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
          .computeUInt64Size(4, writeTime_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
          .computeMessageSize(5, getClusterId());
      }
      for (int i = 0; i < scopes_.size(); i++) {
        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
          .computeMessageSize(6, scopes_.get(i));
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
          .computeUInt32Size(7, followingKvCount_);
      }
      for (int i = 0; i < clusterIds_.size(); i++) {
        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
          .computeMessageSize(8, clusterIds_.get(i));
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
          .computeUInt64Size(9, nonceGroup_);
      }
      if (((bitField0_ & 0x00000080) == 0x00000080)) {
        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
          .computeUInt64Size(10, nonce_);
      }
      if (((bitField0_ & 0x00000100) == 0x00000100)) {
        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
          .computeUInt64Size(11, origSequenceNumber_);
      }
      size += unknownFields.getSerializedSize();
      memoizedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey other = (org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey) obj;

      boolean result = true;
      result = result && (hasEncodedRegionName() == other.hasEncodedRegionName());
      if (hasEncodedRegionName()) {
        result = result && getEncodedRegionName()
            .equals(other.getEncodedRegionName());
      }
      result = result && (hasTableName() == other.hasTableName());
      if (hasTableName()) {
        result = result && getTableName()
            .equals(other.getTableName());
      }
      result = result && (hasLogSequenceNumber() == other.hasLogSequenceNumber());
      if (hasLogSequenceNumber()) {
        result = result && (getLogSequenceNumber()
            == other.getLogSequenceNumber());
      }
      result = result && (hasWriteTime() == other.hasWriteTime());
      if (hasWriteTime()) {
        result = result && (getWriteTime()
            ==
other.getWriteTime()); } result = result && (hasClusterId() == other.hasClusterId()); if (hasClusterId()) { result = result && getClusterId() .equals(other.getClusterId()); } result = result && getScopesList() .equals(other.getScopesList()); result = result && (hasFollowingKvCount() == other.hasFollowingKvCount()); if (hasFollowingKvCount()) { result = result && (getFollowingKvCount() == other.getFollowingKvCount()); } result = result && getClusterIdsList() .equals(other.getClusterIdsList()); result = result && (hasNonceGroup() == other.hasNonceGroup()); if (hasNonceGroup()) { result = result && (getNonceGroup() == other.getNonceGroup()); } result = result && (hasNonce() == other.hasNonce()); if (hasNonce()) { result = result && (getNonce() == other.getNonce()); } result = result && (hasOrigSequenceNumber() == other.hasOrigSequenceNumber()); if (hasOrigSequenceNumber()) { result = result && (getOrigSequenceNumber() == other.getOrigSequenceNumber()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasEncodedRegionName()) { hash = (37 * hash) + ENCODED_REGION_NAME_FIELD_NUMBER; hash = (53 * hash) + getEncodedRegionName().hashCode(); } if (hasTableName()) { hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER; hash = (53 * hash) + getTableName().hashCode(); } if (hasLogSequenceNumber()) { hash = (37 * hash) + LOG_SEQUENCE_NUMBER_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( getLogSequenceNumber()); } if (hasWriteTime()) { hash = (37 * hash) + WRITE_TIME_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( getWriteTime()); } if (hasClusterId()) { hash = (37 * hash) + CLUSTER_ID_FIELD_NUMBER; hash = (53 * hash) + getClusterId().hashCode(); } if (getScopesCount() > 0) { hash = (37 * hash) + SCOPES_FIELD_NUMBER; hash = (53 * hash) + getScopesList().hashCode(); } if (hasFollowingKvCount()) { hash = (37 * hash) + FOLLOWING_KV_COUNT_FIELD_NUMBER; hash = (53 * hash) + getFollowingKvCount(); } if (getClusterIdsCount() > 0) { hash = (37 * hash) + CLUSTER_IDS_FIELD_NUMBER; hash = (53 * hash) + getClusterIdsList().hashCode(); } if (hasNonceGroup()) { hash = (37 * hash) + NONCEGROUP_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( getNonceGroup()); } if (hasNonce()) { hash = (37 * hash) + NONCE_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( getNonce()); } if (hasOrigSequenceNumber()) { hash = (37 * hash) + ORIG_SEQUENCE_NUMBER_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( getOrigSequenceNumber()); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
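/*
 * Note (hand-written; not protoc output): equals() above compares presence
 * bits before values, and hashCode() is memoized in memoizedHashCode, mixing
 * each set field's number and value; a built WALKey therefore behaves
 * correctly as a key in hash-based collections.
 */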
org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
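/*
 * Usage sketch (hand-written; the stream and file name are illustrative, and
 * `wire` is the byte[] from the earlier serialization sketch): every
 * parseFrom overload above routes through PARSER, and parseDelimitedFrom
 * pairs with writeDelimitedTo for length-prefixed streams.
 *
 *   WALProtos.WALKey a = WALProtos.WALKey.parseFrom(wire);            // byte[]
 *   try (java.io.FileInputStream in = new java.io.FileInputStream("keys.bin")) {
 *     WALProtos.WALKey b = WALProtos.WALKey.parseDelimitedFrom(in);   // null at EOF
 *   }
 */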
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Protocol buffer version of WALKey; see WALKey comment, not really a key but WALEdit header * for some KVs * </pre> * * Protobuf type {@code hbase.pb.WALKey} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.WALKey) org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKeyOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALKey_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALKey_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey.class, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getClusterIdFieldBuilder(); getScopesFieldBuilder(); getClusterIdsFieldBuilder(); } } public Builder clear() { super.clear(); encodedRegionName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); tableName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); logSequenceNumber_ = 0L; bitField0_ = (bitField0_ & ~0x00000004); writeTime_ = 0L; bitField0_ = (bitField0_ & ~0x00000008); if (clusterIdBuilder_ == null) { clusterId_ = null; } else { clusterIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000010); if (scopesBuilder_ == null) { scopes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000020); } else { scopesBuilder_.clear(); } followingKvCount_ = 0; bitField0_ = (bitField0_ & ~0x00000040); if (clusterIdsBuilder_ == null) { clusterIds_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000080); } else { clusterIdsBuilder_.clear(); } nonceGroup_ = 0L; bitField0_ = (bitField0_ & ~0x00000100); nonce_ = 0L; bitField0_ = (bitField0_ & ~0x00000200); origSequenceNumber_ = 0L; bitField0_ = (bitField0_ & ~0x00000400); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALKey_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey 
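/*
 * Note (hand-written; not protoc output): build() below delegates to
 * buildPartial() and then enforces the proto2 required fields
 * (encoded_region_name, table_name, log_sequence_number, write_time) via
 * isInitialized(), throwing UninitializedMessageException when any is
 * missing; buildPartial() skips that check. Sketch:
 *
 *   WALProtos.WALKey.Builder b = WALProtos.WALKey.newBuilder()
 *       .setLogSequenceNumber(7L);                  // required fields still unset
 *   WALProtos.WALKey partial = b.buildPartial();    // ok: partial.isInitialized() == false
 *   // b.build() here would throw UninitializedMessageException
 */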
build() { org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey result = new org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.encodedRegionName_ = encodedRegionName_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.tableName_ = tableName_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.logSequenceNumber_ = logSequenceNumber_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; } result.writeTime_ = writeTime_; if (((from_bitField0_ & 0x00000010) == 0x00000010)) { to_bitField0_ |= 0x00000010; } if (clusterIdBuilder_ == null) { result.clusterId_ = clusterId_; } else { result.clusterId_ = clusterIdBuilder_.build(); } if (scopesBuilder_ == null) { if (((bitField0_ & 0x00000020) == 0x00000020)) { scopes_ = java.util.Collections.unmodifiableList(scopes_); bitField0_ = (bitField0_ & ~0x00000020); } result.scopes_ = scopes_; } else { result.scopes_ = scopesBuilder_.build(); } if (((from_bitField0_ & 0x00000040) == 0x00000040)) { to_bitField0_ |= 0x00000020; } result.followingKvCount_ = followingKvCount_; if (clusterIdsBuilder_ == null) { if (((bitField0_ & 0x00000080) == 0x00000080)) { clusterIds_ = java.util.Collections.unmodifiableList(clusterIds_); bitField0_ = (bitField0_ & ~0x00000080); } result.clusterIds_ = clusterIds_; } else { result.clusterIds_ = clusterIdsBuilder_.build(); } if (((from_bitField0_ & 0x00000100) == 0x00000100)) { to_bitField0_ |= 0x00000040; } result.nonceGroup_ = nonceGroup_; if (((from_bitField0_ & 0x00000200) == 0x00000200)) { to_bitField0_ |= 0x00000080; } result.nonce_ = nonce_; if (((from_bitField0_ & 0x00000400) == 0x00000400)) { to_bitField0_ |= 0x00000100; } result.origSequenceNumber_ = origSequenceNumber_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey)other); } else { 
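/*
 * Merge semantics sketch (hand-written; `older` and `newer` are hypothetical
 * WALKey instances): scalars set in the source overwrite this builder's
 * values, repeated fields (scopes, cluster_ids) are appended, and the
 * cluster_id message field is merged recursively.
 *
 *   WALProtos.WALKey.Builder merged = older.toBuilder();
 *   merged.mergeFrom(newer);   // newer's set scalars win; lists concatenate
 */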
super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey.getDefaultInstance()) return this; if (other.hasEncodedRegionName()) { setEncodedRegionName(other.getEncodedRegionName()); } if (other.hasTableName()) { setTableName(other.getTableName()); } if (other.hasLogSequenceNumber()) { setLogSequenceNumber(other.getLogSequenceNumber()); } if (other.hasWriteTime()) { setWriteTime(other.getWriteTime()); } if (other.hasClusterId()) { mergeClusterId(other.getClusterId()); } if (scopesBuilder_ == null) { if (!other.scopes_.isEmpty()) { if (scopes_.isEmpty()) { scopes_ = other.scopes_; bitField0_ = (bitField0_ & ~0x00000020); } else { ensureScopesIsMutable(); scopes_.addAll(other.scopes_); } onChanged(); } } else { if (!other.scopes_.isEmpty()) { if (scopesBuilder_.isEmpty()) { scopesBuilder_.dispose(); scopesBuilder_ = null; scopes_ = other.scopes_; bitField0_ = (bitField0_ & ~0x00000020); scopesBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getScopesFieldBuilder() : null; } else { scopesBuilder_.addAllMessages(other.scopes_); } } } if (other.hasFollowingKvCount()) { setFollowingKvCount(other.getFollowingKvCount()); } if (clusterIdsBuilder_ == null) { if (!other.clusterIds_.isEmpty()) { if (clusterIds_.isEmpty()) { clusterIds_ = other.clusterIds_; bitField0_ = (bitField0_ & ~0x00000080); } else { ensureClusterIdsIsMutable(); clusterIds_.addAll(other.clusterIds_); } onChanged(); } } else { if (!other.clusterIds_.isEmpty()) { if (clusterIdsBuilder_.isEmpty()) { clusterIdsBuilder_.dispose(); clusterIdsBuilder_ = null; clusterIds_ = other.clusterIds_; bitField0_ = (bitField0_ & ~0x00000080); clusterIdsBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getClusterIdsFieldBuilder() : null; } else { clusterIdsBuilder_.addAllMessages(other.clusterIds_); } } } if (other.hasNonceGroup()) { setNonceGroup(other.getNonceGroup()); } if (other.hasNonce()) { setNonce(other.getNonce()); } if (other.hasOrigSequenceNumber()) { setOrigSequenceNumber(other.getOrigSequenceNumber()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasEncodedRegionName()) { return false; } if (!hasTableName()) { return false; } if (!hasLogSequenceNumber()) { return false; } if (!hasWriteTime()) { return false; } if (hasClusterId()) { if (!getClusterId().isInitialized()) { return false; } } for (int i = 0; i < getScopesCount(); i++) { if (!getScopes(i).isInitialized()) { return false; } } for (int i = 0; i < getClusterIdsCount(); i++) { if (!getClusterIds(i).isInitialized()) { return false; } } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString encodedRegionName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes encoded_region_name = 1;</code> */ public boolean hasEncodedRegionName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes encoded_region_name = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getEncodedRegionName() { return encodedRegionName_; } /** * <code>required bytes encoded_region_name = 1;</code> */ public Builder setEncodedRegionName(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; encodedRegionName_ = value; onChanged(); return this; } /** * <code>required bytes encoded_region_name = 1;</code> */ public Builder clearEncodedRegionName() { bitField0_ = (bitField0_ & ~0x00000001); encodedRegionName_ = getDefaultInstance().getEncodedRegionName(); onChanged(); return this; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString tableName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes table_name = 2;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required bytes table_name = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getTableName() { return tableName_; } /** * <code>required bytes table_name = 2;</code> */ public Builder setTableName(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; tableName_ = value; onChanged(); return this; } /** * <code>required bytes table_name = 2;</code> */ public Builder clearTableName() { bitField0_ = (bitField0_ & ~0x00000002); tableName_ 
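/*
 * Sketch (hand-written; `builder` and `tableNameBytes` are hypothetical):
 * the required bytes fields take immutable ByteString values, and a null
 * argument is rejected with a NullPointerException rather than clearing the
 * field.
 *
 *   builder.setTableName(ByteString.copyFrom(tableNameBytes));   // from byte[]
 *   builder.setTableName(ByteString.copyFromUtf8("ns:table"));   // from UTF-8 text
 */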
= getDefaultInstance().getTableName(); onChanged(); return this; } private long logSequenceNumber_ ; /** * <code>required uint64 log_sequence_number = 3;</code> */ public boolean hasLogSequenceNumber() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>required uint64 log_sequence_number = 3;</code> */ public long getLogSequenceNumber() { return logSequenceNumber_; } /** * <code>required uint64 log_sequence_number = 3;</code> */ public Builder setLogSequenceNumber(long value) { bitField0_ |= 0x00000004; logSequenceNumber_ = value; onChanged(); return this; } /** * <code>required uint64 log_sequence_number = 3;</code> */ public Builder clearLogSequenceNumber() { bitField0_ = (bitField0_ & ~0x00000004); logSequenceNumber_ = 0L; onChanged(); return this; } private long writeTime_ ; /** * <code>required uint64 write_time = 4;</code> */ public boolean hasWriteTime() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>required uint64 write_time = 4;</code> */ public long getWriteTime() { return writeTime_; } /** * <code>required uint64 write_time = 4;</code> */ public Builder setWriteTime(long value) { bitField0_ |= 0x00000008; writeTime_ = value; onChanged(); return this; } /** * <code>required uint64 write_time = 4;</code> */ public Builder clearWriteTime() { bitField0_ = (bitField0_ & ~0x00000008); writeTime_ = 0L; onChanged(); return this; } private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID clusterId_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder> clusterIdBuilder_; /** * <pre> *This parameter is deprecated in favor of clusters which *contains the list of clusters that have consumed the change. *It is retained so that the log created by earlier releases (0.94) *can be read by the newer releases. * </pre> * * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code> */ @java.lang.Deprecated public boolean hasClusterId() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <pre> *This parameter is deprecated in favor of clusters which *contains the list of clusters that have consumed the change. *It is retained so that the log created by earlier releases (0.94) *can be read by the newer releases. * </pre> * * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code> */ @java.lang.Deprecated public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID getClusterId() { if (clusterIdBuilder_ == null) { return clusterId_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.getDefaultInstance() : clusterId_; } else { return clusterIdBuilder_.getMessage(); } } /** * <pre> *This parameter is deprecated in favor of clusters which *contains the list of clusters that have consumed the change. *It is retained so that the log created by earlier releases (0.94) *can be read by the newer releases. 
* </pre> * * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code> */ @java.lang.Deprecated public Builder setClusterId(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID value) { if (clusterIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } clusterId_ = value; onChanged(); } else { clusterIdBuilder_.setMessage(value); } bitField0_ |= 0x00000010; return this; } /** * <pre> *This parameter is deprecated in favor of clusters which *contains the list of clusters that have consumed the change. *It is retained so that the log created by earlier releases (0.94) *can be read by the newer releases. * </pre> * * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code> */ @java.lang.Deprecated public Builder setClusterId( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder builderForValue) { if (clusterIdBuilder_ == null) { clusterId_ = builderForValue.build(); onChanged(); } else { clusterIdBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000010; return this; } /** * <pre> *This parameter is deprecated in favor of clusters which *contains the list of clusters that have consumed the change. *It is retained so that the log created by earlier releases (0.94) *can be read by the newer releases. * </pre> * * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code> */ @java.lang.Deprecated public Builder mergeClusterId(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID value) { if (clusterIdBuilder_ == null) { if (((bitField0_ & 0x00000010) == 0x00000010) && clusterId_ != null && clusterId_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.getDefaultInstance()) { clusterId_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.newBuilder(clusterId_).mergeFrom(value).buildPartial(); } else { clusterId_ = value; } onChanged(); } else { clusterIdBuilder_.mergeFrom(value); } bitField0_ |= 0x00000010; return this; } /** * <pre> *This parameter is deprecated in favor of clusters which *contains the list of clusters that have consumed the change. *It is retained so that the log created by earlier releases (0.94) *can be read by the newer releases. * </pre> * * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code> */ @java.lang.Deprecated public Builder clearClusterId() { if (clusterIdBuilder_ == null) { clusterId_ = null; onChanged(); } else { clusterIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000010); return this; } /** * <pre> *This parameter is deprecated in favor of clusters which *contains the list of clusters that have consumed the change. *It is retained so that the log created by earlier releases (0.94) *can be read by the newer releases. * </pre> * * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code> */ @java.lang.Deprecated public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder getClusterIdBuilder() { bitField0_ |= 0x00000010; onChanged(); return getClusterIdFieldBuilder().getBuilder(); } /** * <pre> *This parameter is deprecated in favor of clusters which *contains the list of clusters that have consumed the change. *It is retained so that the log created by earlier releases (0.94) *can be read by the newer releases. 
* </pre> * * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code> */ @java.lang.Deprecated public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdOrBuilder() { if (clusterIdBuilder_ != null) { return clusterIdBuilder_.getMessageOrBuilder(); } else { return clusterId_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.getDefaultInstance() : clusterId_; } } /** * <pre> *This parameter is deprecated in favor of clusters which *contains the list of clusters that have consumed the change. *It is retained so that the log created by earlier releases (0.94) *can be read by the newer releases. * </pre> * * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder> getClusterIdFieldBuilder() { if (clusterIdBuilder_ == null) { clusterIdBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder>( getClusterId(), getParentForChildren(), isClean()); clusterId_ = null; } return clusterIdBuilder_; } private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope> scopes_ = java.util.Collections.emptyList(); private void ensureScopesIsMutable() { if (!((bitField0_ & 0x00000020) == 0x00000020)) { scopes_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope>(scopes_); bitField0_ |= 0x00000020; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScopeOrBuilder> scopesBuilder_; /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope> getScopesList() { if (scopesBuilder_ == null) { return java.util.Collections.unmodifiableList(scopes_); } else { return scopesBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public int getScopesCount() { if (scopesBuilder_ == null) { return scopes_.size(); } else { return scopesBuilder_.getCount(); } } /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope getScopes(int index) { if (scopesBuilder_ == null) { return scopes_.get(index); } else { return scopesBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public Builder setScopes( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope value) { if (scopesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureScopesIsMutable(); scopes_.set(index, value); onChanged(); } else { scopesBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public Builder setScopes( int 
index, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope.Builder builderForValue) { if (scopesBuilder_ == null) { ensureScopesIsMutable(); scopes_.set(index, builderForValue.build()); onChanged(); } else { scopesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public Builder addScopes(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope value) { if (scopesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureScopesIsMutable(); scopes_.add(value); onChanged(); } else { scopesBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public Builder addScopes( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope value) { if (scopesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureScopesIsMutable(); scopes_.add(index, value); onChanged(); } else { scopesBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public Builder addScopes( org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope.Builder builderForValue) { if (scopesBuilder_ == null) { ensureScopesIsMutable(); scopes_.add(builderForValue.build()); onChanged(); } else { scopesBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public Builder addScopes( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope.Builder builderForValue) { if (scopesBuilder_ == null) { ensureScopesIsMutable(); scopes_.add(index, builderForValue.build()); onChanged(); } else { scopesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public Builder addAllScopes( java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope> values) { if (scopesBuilder_ == null) { ensureScopesIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, scopes_); onChanged(); } else { scopesBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public Builder clearScopes() { if (scopesBuilder_ == null) { scopes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000020); onChanged(); } else { scopesBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public Builder removeScopes(int index) { if (scopesBuilder_ == null) { ensureScopesIsMutable(); scopes_.remove(index); onChanged(); } else { scopesBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope.Builder getScopesBuilder( int index) { return getScopesFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScopeOrBuilder getScopesOrBuilder( int index) { if (scopesBuilder_ == null) { return scopes_.get(index); } else { return scopesBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScopeOrBuilder> getScopesOrBuilderList() { if (scopesBuilder_ != null) { return scopesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(scopes_); } } /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope.Builder addScopesBuilder() { return getScopesFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope.getDefaultInstance()); } /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope.Builder addScopesBuilder( int index) { return getScopesFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope.getDefaultInstance()); } /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope.Builder> getScopesBuilderList() { return getScopesFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScopeOrBuilder> getScopesFieldBuilder() { if (scopesBuilder_ == null) { scopesBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScopeOrBuilder>( scopes_, ((bitField0_ & 0x00000020) == 0x00000020), getParentForChildren(), isClean()); scopes_ = null; } return scopesBuilder_; } private int followingKvCount_ ; /** * <code>optional uint32 following_kv_count = 7;</code> */ public boolean hasFollowingKvCount() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** * <code>optional uint32 following_kv_count = 7;</code> */ public int getFollowingKvCount() { return followingKvCount_; } /** * <code>optional uint32 following_kv_count = 7;</code> */ public Builder setFollowingKvCount(int value) { bitField0_ |= 0x00000040; followingKvCount_ = value; onChanged(); return this; } /** * <code>optional uint32 following_kv_count = 7;</code> */ public Builder clearFollowingKvCount() { bitField0_ = (bitField0_ & ~0x00000040); followingKvCount_ = 0; onChanged(); return this; } private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID> clusterIds_ = java.util.Collections.emptyList(); private void ensureClusterIdsIsMutable() { if (!((bitField0_ & 0x00000080) == 0x00000080)) { clusterIds_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID>(clusterIds_); bitField0_ |= 0x00000080; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder> clusterIdsBuilder_; /** * <pre> *This field contains the list of clusters that have *consumed the change * </pre> * * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> */ 
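/*
 * Sketch (hand-written; assumes HBaseProtos.UUID exposes the two uint64
 * halves of a java.util.UUID via setMostSigBits/setLeastSigBits):
 *
 *   java.util.UUID id = java.util.UUID.randomUUID();
 *   builder.addClusterIds(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.newBuilder()
 *       .setMostSigBits(id.getMostSignificantBits())
 *       .setLeastSigBits(id.getLeastSignificantBits())
 *       .build());
 *   int consumed = builder.getClusterIdsCount();   // via the accessors below
 */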
public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID> getClusterIdsList() { if (clusterIdsBuilder_ == null) { return java.util.Collections.unmodifiableList(clusterIds_); } else { return clusterIdsBuilder_.getMessageList(); } } /** * <pre> *This field contains the list of clusters that have *consumed the change * </pre> * * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> */ public int getClusterIdsCount() { if (clusterIdsBuilder_ == null) { return clusterIds_.size(); } else { return clusterIdsBuilder_.getCount(); } } /** * <pre> *This field contains the list of clusters that have *consumed the change * </pre> * * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID getClusterIds(int index) { if (clusterIdsBuilder_ == null) { return clusterIds_.get(index); } else { return clusterIdsBuilder_.getMessage(index); } } /** * <pre> *This field contains the list of clusters that have *consumed the change * </pre> * * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> */ public Builder setClusterIds( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID value) { if (clusterIdsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureClusterIdsIsMutable(); clusterIds_.set(index, value); onChanged(); } else { clusterIdsBuilder_.setMessage(index, value); } return this; } /** * <pre> *This field contains the list of clusters that have *consumed the change * </pre> * * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> */ public Builder setClusterIds( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder builderForValue) { if (clusterIdsBuilder_ == null) { ensureClusterIdsIsMutable(); clusterIds_.set(index, builderForValue.build()); onChanged(); } else { clusterIdsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <pre> *This field contains the list of clusters that have *consumed the change * </pre> * * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> */ public Builder addClusterIds(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID value) { if (clusterIdsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureClusterIdsIsMutable(); clusterIds_.add(value); onChanged(); } else { clusterIdsBuilder_.addMessage(value); } return this; } /** * <pre> *This field contains the list of clusters that have *consumed the change * </pre> * * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> */ public Builder addClusterIds( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID value) { if (clusterIdsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureClusterIdsIsMutable(); clusterIds_.add(index, value); onChanged(); } else { clusterIdsBuilder_.addMessage(index, value); } return this; } /** * <pre> *This field contains the list of clusters that have *consumed the change * </pre> * * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> */ public Builder addClusterIds( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder builderForValue) { if (clusterIdsBuilder_ == null) { ensureClusterIdsIsMutable(); clusterIds_.add(builderForValue.build()); onChanged(); } else { clusterIdsBuilder_.addMessage(builderForValue.build()); } return this; } /** * <pre> *This field contains the list of clusters that have *consumed the change * </pre> * * <code>repeated .hbase.pb.UUID 
cluster_ids = 8;</code> */ public Builder addClusterIds( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder builderForValue) { if (clusterIdsBuilder_ == null) { ensureClusterIdsIsMutable(); clusterIds_.add(index, builderForValue.build()); onChanged(); } else { clusterIdsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <pre> *This field contains the list of clusters that have *consumed the change * </pre> * * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> */ public Builder addAllClusterIds( java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID> values) { if (clusterIdsBuilder_ == null) { ensureClusterIdsIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, clusterIds_); onChanged(); } else { clusterIdsBuilder_.addAllMessages(values); } return this; } /** * <pre> *This field contains the list of clusters that have *consumed the change * </pre> * * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> */ public Builder clearClusterIds() { if (clusterIdsBuilder_ == null) { clusterIds_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000080); onChanged(); } else { clusterIdsBuilder_.clear(); } return this; } /** * <pre> *This field contains the list of clusters that have *consumed the change * </pre> * * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> */ public Builder removeClusterIds(int index) { if (clusterIdsBuilder_ == null) { ensureClusterIdsIsMutable(); clusterIds_.remove(index); onChanged(); } else { clusterIdsBuilder_.remove(index); } return this; } /** * <pre> *This field contains the list of clusters that have *consumed the change * </pre> * * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder getClusterIdsBuilder( int index) { return getClusterIdsFieldBuilder().getBuilder(index); } /** * <pre> *This field contains the list of clusters that have *consumed the change * </pre> * * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdsOrBuilder( int index) { if (clusterIdsBuilder_ == null) { return clusterIds_.get(index); } else { return clusterIdsBuilder_.getMessageOrBuilder(index); } } /** * <pre> *This field contains the list of clusters that have *consumed the change * </pre> * * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder> getClusterIdsOrBuilderList() { if (clusterIdsBuilder_ != null) { return clusterIdsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(clusterIds_); } } /** * <pre> *This field contains the list of clusters that have *consumed the change * </pre> * * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder addClusterIdsBuilder() { return getClusterIdsFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.getDefaultInstance()); } /** * <pre> *This field contains the list of clusters that have *consumed the change * </pre> * * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder addClusterIdsBuilder( int index) { return getClusterIdsFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.getDefaultInstance()); } /** * <pre> *This field contains the list of clusters that have *consumed the change * </pre> * * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder> getClusterIdsBuilderList() { return getClusterIdsFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder> getClusterIdsFieldBuilder() { if (clusterIdsBuilder_ == null) { clusterIdsBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder>( clusterIds_, ((bitField0_ & 0x00000080) == 0x00000080), getParentForChildren(), isClean()); clusterIds_ = null; } return clusterIdsBuilder_; } private long nonceGroup_ ; /** * <code>optional uint64 nonceGroup = 9;</code> */ public boolean hasNonceGroup() { return ((bitField0_ & 0x00000100) == 0x00000100); } /** * <code>optional uint64 nonceGroup = 9;</code> */ public long getNonceGroup() { return nonceGroup_; } /** * <code>optional uint64 nonceGroup = 9;</code> */ public Builder setNonceGroup(long value) { bitField0_ |= 0x00000100; nonceGroup_ = value; onChanged(); return this; } /** * <code>optional uint64 nonceGroup = 9;</code> */ public Builder clearNonceGroup() { bitField0_ = (bitField0_ & ~0x00000100); nonceGroup_ = 0L; onChanged(); return this; } private long nonce_ ; /** * <code>optional uint64 nonce = 10;</code> */ public boolean hasNonce() { return ((bitField0_ & 0x00000200) == 0x00000200); } /** * <code>optional uint64 nonce = 10;</code> */ public long getNonce() { return nonce_; } /** * <code>optional uint64 nonce = 10;</code> */ public Builder setNonce(long value) { bitField0_ |= 0x00000200; nonce_ = value; onChanged(); return this; } /** * <code>optional uint64 nonce = 10;</code> */ public Builder clearNonce() { bitField0_ = (bitField0_ & ~0x00000200); nonce_ = 0L; onChanged(); return this; } private long origSequenceNumber_ ; /** * <code>optional uint64 orig_sequence_number = 11;</code> */ 
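/*
 * Note (hand-written; not protoc output): the optional uint64 fields
 * (nonceGroup, nonce, orig_sequence_number) default to 0, so callers should
 * consult hasXxx() before trusting getXxx(). Sketch, assuming
 * HConstants.NO_NONCE as the conventional 0-valued sentinel:
 *
 *   long nonce = key.hasNonce() ? key.getNonce() : HConstants.NO_NONCE;
 */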
public boolean hasOrigSequenceNumber() { return ((bitField0_ & 0x00000400) == 0x00000400); } /** * <code>optional uint64 orig_sequence_number = 11;</code> */ public long getOrigSequenceNumber() { return origSequenceNumber_; } /** * <code>optional uint64 orig_sequence_number = 11;</code> */ public Builder setOrigSequenceNumber(long value) { bitField0_ |= 0x00000400; origSequenceNumber_ = value; onChanged(); return this; } /** * <code>optional uint64 orig_sequence_number = 11;</code> */ public Builder clearOrigSequenceNumber() { bitField0_ = (bitField0_ & ~0x00000400); origSequenceNumber_ = 0L; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.WALKey) } // @@protoc_insertion_point(class_scope:hbase.pb.WALKey) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<WALKey> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<WALKey>() { public WALKey parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new WALKey(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<WALKey> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<WALKey> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface FamilyScopeOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.FamilyScope) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>required bytes family = 1;</code> */ boolean hasFamily(); /** * <code>required bytes family = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getFamily(); /** * <code>required .hbase.pb.ScopeType scope_type = 2;</code> */ boolean hasScopeType(); /** * <code>required .hbase.pb.ScopeType scope_type = 2;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.ScopeType getScopeType(); } /** * Protobuf type {@code hbase.pb.FamilyScope} */ public static final class FamilyScope extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.FamilyScope) FamilyScopeOrBuilder { // Use FamilyScope.newBuilder() to construct. 
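/*
 * Usage sketch (hand-written; the family name and `walKeyBuilder` are
 * illustrative):
 *
 *   WALProtos.FamilyScope scope = WALProtos.FamilyScope.newBuilder()
 *       .setFamily(ByteString.copyFromUtf8("cf"))
 *       .setScopeType(WALProtos.ScopeType.REPLICATION_SCOPE_GLOBAL)
 *       .build();
 *   walKeyBuilder.addScopes(scope);   // attach to a WALKey under construction
 */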
private FamilyScope(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private FamilyScope() { family_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; scopeType_ = 0; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private FamilyScope( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; family_ = input.readBytes(); break; } case 16: { int rawValue = input.readEnum(); org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.ScopeType value = org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.ScopeType.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(2, rawValue); } else { bitField0_ |= 0x00000002; scopeType_ = rawValue; } break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_FamilyScope_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_FamilyScope_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope.class, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope.Builder.class); } private int bitField0_; public static final int FAMILY_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString family_; /** * <code>required bytes family = 1;</code> */ public boolean hasFamily() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes family = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getFamily() { return family_; } public static final int SCOPE_TYPE_FIELD_NUMBER = 2; private int scopeType_; /** * <code>required .hbase.pb.ScopeType scope_type = 2;</code> */ public boolean hasScopeType() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required .hbase.pb.ScopeType scope_type = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.ScopeType getScopeType() { org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.ScopeType result = 
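/*
 * Note (hand-written; not protoc output): ScopeType.valueOf(int) (the
 * deprecated alias of forNumber) returns null for numbers outside the enum,
 * so unrecognized values are preserved in unknownFields during parsing
 * (case 16 above), and the getter below falls back to
 * REPLICATION_SCOPE_LOCAL instead of returning null.
 */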
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.ScopeType.valueOf(scopeType_); return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.ScopeType.REPLICATION_SCOPE_LOCAL : result; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasFamily()) { memoizedIsInitialized = 0; return false; } if (!hasScopeType()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, family_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeEnum(2, scopeType_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(1, family_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeEnumSize(2, scopeType_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope other = (org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope) obj; boolean result = true; result = result && (hasFamily() == other.hasFamily()); if (hasFamily()) { result = result && getFamily() .equals(other.getFamily()); } result = result && (hasScopeType() == other.hasScopeType()); if (hasScopeType()) { result = result && scopeType_ == other.scopeType_; } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasFamily()) { hash = (37 * hash) + FAMILY_FIELD_NUMBER; hash = (53 * hash) + getFamily().hashCode(); } if (hasScopeType()) { hash = (37 * hash) + SCOPE_TYPE_FIELD_NUMBER; hash = (53 * hash) + scopeType_; } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope parseFrom(byte[] data) throws 
org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
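/*
 * Sketch (hand-written; `scope` is a previously built FamilyScope):
 * toBuilder() returns an editable copy, leaving the original untouched.
 *
 *   WALProtos.FamilyScope rewritten = scope.toBuilder()
 *       .setScopeType(WALProtos.ScopeType.REPLICATION_SCOPE_SERIAL)
 *       .build();
 */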
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.FamilyScope} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.FamilyScope) org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScopeOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_FamilyScope_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_FamilyScope_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope.class, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); family_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); scopeType_ = 0; bitField0_ = (bitField0_ & ~0x00000002); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_FamilyScope_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope build() { org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope result = new org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.family_ = family_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.scopeType_ = scopeType_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, 
value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope.getDefaultInstance()) return this; if (other.hasFamily()) { setFamily(other.getFamily()); } if (other.hasScopeType()) { setScopeType(other.getScopeType()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasFamily()) { return false; } if (!hasScopeType()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString family_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes family = 1;</code> */ public boolean hasFamily() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes family = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getFamily() { return family_; } /** * <code>required bytes family = 1;</code> */ public Builder setFamily(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; family_ = value; onChanged(); return this; } /** * <code>required bytes family = 1;</code> */ public Builder clearFamily() { bitField0_ = (bitField0_ & ~0x00000001); family_ = getDefaultInstance().getFamily(); onChanged(); return this; } private int scopeType_ = 0; /** * <code>required .hbase.pb.ScopeType scope_type = 2;</code> */ public boolean hasScopeType() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required .hbase.pb.ScopeType scope_type = 2;</code> */ public 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.ScopeType getScopeType() { org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.ScopeType result = org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.ScopeType.valueOf(scopeType_); return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.ScopeType.REPLICATION_SCOPE_LOCAL : result; } /** * <code>required .hbase.pb.ScopeType scope_type = 2;</code> */ public Builder setScopeType(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.ScopeType value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; scopeType_ = value.getNumber(); onChanged(); return this; } /** * <code>required .hbase.pb.ScopeType scope_type = 2;</code> */ public Builder clearScopeType() { bitField0_ = (bitField0_ & ~0x00000002); scopeType_ = 0; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.FamilyScope) } // @@protoc_insertion_point(class_scope:hbase.pb.FamilyScope) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FamilyScope> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<FamilyScope>() { public FamilyScope parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new FamilyScope(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FamilyScope> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FamilyScope> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface CompactionDescriptorOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.CompactionDescriptor) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <pre> * TODO: WALKey already stores these, might remove * </pre> * * <code>required bytes table_name = 1;</code> */ boolean hasTableName(); /** * <pre> * TODO: WALKey already stores these, might remove * </pre> * * <code>required bytes table_name = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getTableName(); /** * <code>required bytes encoded_region_name = 2;</code> */ boolean hasEncodedRegionName(); /** * <code>required bytes encoded_region_name = 2;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getEncodedRegionName(); /** * <code>required bytes family_name = 3;</code> */ boolean hasFamilyName(); /** * 
<code>required bytes family_name = 3;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getFamilyName(); /** * <pre> * relative to store dir * </pre> * * <code>repeated string compaction_input = 4;</code> */ java.util.List<java.lang.String> getCompactionInputList(); /** * <pre> * relative to store dir * </pre> * * <code>repeated string compaction_input = 4;</code> */ int getCompactionInputCount(); /** * <pre> * relative to store dir * </pre> * * <code>repeated string compaction_input = 4;</code> */ java.lang.String getCompactionInput(int index); /** * <pre> * relative to store dir * </pre> * * <code>repeated string compaction_input = 4;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getCompactionInputBytes(int index); /** * <code>repeated string compaction_output = 5;</code> */ java.util.List<java.lang.String> getCompactionOutputList(); /** * <code>repeated string compaction_output = 5;</code> */ int getCompactionOutputCount(); /** * <code>repeated string compaction_output = 5;</code> */ java.lang.String getCompactionOutput(int index); /** * <code>repeated string compaction_output = 5;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getCompactionOutputBytes(int index); /** * <pre> * relative to region dir * </pre> * * <code>required string store_home_dir = 6;</code> */ boolean hasStoreHomeDir(); /** * <pre> * relative to region dir * </pre> * * <code>required string store_home_dir = 6;</code> */ java.lang.String getStoreHomeDir(); /** * <pre> * relative to region dir * </pre> * * <code>required string store_home_dir = 6;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getStoreHomeDirBytes(); /** * <pre> * full region name * </pre> * * <code>optional bytes region_name = 7;</code> */ boolean hasRegionName(); /** * <pre> * full region name * </pre> * * <code>optional bytes region_name = 7;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getRegionName(); } /** * <pre> ** * Special WAL entry to hold all related to a compaction. * Written to WAL before completing compaction. There is * sufficient info in the below message to complete later * the * compaction should we fail the WAL write. * </pre> * * Protobuf type {@code hbase.pb.CompactionDescriptor} */ public static final class CompactionDescriptor extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.CompactionDescriptor) CompactionDescriptorOrBuilder { // Use CompactionDescriptor.newBuilder() to construct. 
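// Illustrative usage sketch (a comment, not protoc output; ByteString is the
// shaded org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString). The
// four required fields must all be set or build() throws, via
// newUninitializedMessageException(result); the literal values are hypothetical.
//
//   CompactionDescriptor cd = CompactionDescriptor.newBuilder()
//       .setTableName(ByteString.copyFromUtf8("TestTable"))       // required
//       .setEncodedRegionName(ByteString.copyFromUtf8("abc123"))  // required
//       .setFamilyName(ByteString.copyFromUtf8("cf"))             // required
//       .setStoreHomeDir("cf")                 // required, relative to region dir
//       .addCompactionInput("store/file-1")    // repeated, relative to store dir
//       .addCompactionOutput("store/file-2")
//       .build();
//   // Round-trip through the generated parser:
//   CompactionDescriptor copy = CompactionDescriptor.parseFrom(cd.toByteArray());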
private CompactionDescriptor(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CompactionDescriptor() { tableName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; encodedRegionName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; familyName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; compactionInput_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY; compactionOutput_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY; storeHomeDir_ = ""; regionName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private CompactionDescriptor( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; tableName_ = input.readBytes(); break; } case 18: { bitField0_ |= 0x00000002; encodedRegionName_ = input.readBytes(); break; } case 26: { bitField0_ |= 0x00000004; familyName_ = input.readBytes(); break; } case 34: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) { compactionInput_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList(); mutable_bitField0_ |= 0x00000008; } compactionInput_.add(bs); break; } case 42: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) { compactionOutput_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList(); mutable_bitField0_ |= 0x00000010; } compactionOutput_.add(bs); break; } case 50: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000008; storeHomeDir_ = bs; break; } case 58: { bitField0_ |= 0x00000010; regionName_ = input.readBytes(); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) { compactionInput_ = compactionInput_.getUnmodifiableView(); } if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) { compactionOutput_ = compactionOutput_.getUnmodifiableView(); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_CompactionDescriptor_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_CompactionDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor.class, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor.Builder.class); } private int bitField0_; public static final int TABLE_NAME_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString tableName_; /** * <pre> * TODO: WALKey already stores these, might remove * </pre> * * <code>required bytes table_name = 1;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * TODO: WALKey already stores these, might remove * </pre> * * <code>required bytes table_name = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getTableName() { return tableName_; } public static final int ENCODED_REGION_NAME_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString encodedRegionName_; /** * <code>required bytes encoded_region_name = 2;</code> */ public boolean hasEncodedRegionName() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required bytes encoded_region_name = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getEncodedRegionName() { return encodedRegionName_; } public static final int FAMILY_NAME_FIELD_NUMBER = 3; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString familyName_; /** * <code>required bytes family_name = 3;</code> */ public boolean hasFamilyName() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>required bytes family_name = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getFamilyName() { return familyName_; } public static final int COMPACTION_INPUT_FIELD_NUMBER = 4; private org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringList compactionInput_; /** * <pre> * relative to store dir * </pre> * * <code>repeated string compaction_input = 4;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolStringList getCompactionInputList() { return compactionInput_; } /** * <pre> * relative to store dir * </pre> * * <code>repeated string compaction_input = 4;</code> */ public int getCompactionInputCount() { return compactionInput_.size(); } /** * <pre> * relative to store dir * </pre> * * <code>repeated string compaction_input = 4;</code> */ public java.lang.String getCompactionInput(int index) { return compactionInput_.get(index); } /** * <pre> * relative to store dir * </pre> * * <code>repeated string compaction_input = 4;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getCompactionInputBytes(int index) { return compactionInput_.getByteString(index); } public static final int COMPACTION_OUTPUT_FIELD_NUMBER = 5; private org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringList compactionOutput_; /** * <code>repeated string compaction_output = 5;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolStringList getCompactionOutputList() { return compactionOutput_; } /** * <code>repeated string compaction_output = 5;</code> */ 
public int getCompactionOutputCount() { return compactionOutput_.size(); } /** * <code>repeated string compaction_output = 5;</code> */ public java.lang.String getCompactionOutput(int index) { return compactionOutput_.get(index); } /** * <code>repeated string compaction_output = 5;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getCompactionOutputBytes(int index) { return compactionOutput_.getByteString(index); } public static final int STORE_HOME_DIR_FIELD_NUMBER = 6; private volatile java.lang.Object storeHomeDir_; /** * <pre> * relative to region dir * </pre> * * <code>required string store_home_dir = 6;</code> */ public boolean hasStoreHomeDir() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <pre> * relative to region dir * </pre> * * <code>required string store_home_dir = 6;</code> */ public java.lang.String getStoreHomeDir() { java.lang.Object ref = storeHomeDir_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { storeHomeDir_ = s; } return s; } } /** * <pre> * relative to region dir * </pre> * * <code>required string store_home_dir = 6;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getStoreHomeDirBytes() { java.lang.Object ref = storeHomeDir_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); storeHomeDir_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int REGION_NAME_FIELD_NUMBER = 7; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString regionName_; /** * <pre> * full region name * </pre> * * <code>optional bytes region_name = 7;</code> */ public boolean hasRegionName() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <pre> * full region name * </pre> * * <code>optional bytes region_name = 7;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getRegionName() { return regionName_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasTableName()) { memoizedIsInitialized = 0; return false; } if (!hasEncodedRegionName()) { memoizedIsInitialized = 0; return false; } if (!hasFamilyName()) { memoizedIsInitialized = 0; return false; } if (!hasStoreHomeDir()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, tableName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, encodedRegionName_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBytes(3, familyName_); } for (int i = 0; i < compactionInput_.size(); i++) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 4, compactionInput_.getRaw(i)); } for (int i = 0; i < compactionOutput_.size(); i++) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 5, 
compactionOutput_.getRaw(i)); } if (((bitField0_ & 0x00000008) == 0x00000008)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 6, storeHomeDir_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeBytes(7, regionName_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(1, tableName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(2, encodedRegionName_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(3, familyName_); } { int dataSize = 0; for (int i = 0; i < compactionInput_.size(); i++) { dataSize += computeStringSizeNoTag(compactionInput_.getRaw(i)); } size += dataSize; size += 1 * getCompactionInputList().size(); } { int dataSize = 0; for (int i = 0; i < compactionOutput_.size(); i++) { dataSize += computeStringSizeNoTag(compactionOutput_.getRaw(i)); } size += dataSize; size += 1 * getCompactionOutputList().size(); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(6, storeHomeDir_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(7, regionName_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor other = (org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor) obj; boolean result = true; result = result && (hasTableName() == other.hasTableName()); if (hasTableName()) { result = result && getTableName() .equals(other.getTableName()); } result = result && (hasEncodedRegionName() == other.hasEncodedRegionName()); if (hasEncodedRegionName()) { result = result && getEncodedRegionName() .equals(other.getEncodedRegionName()); } result = result && (hasFamilyName() == other.hasFamilyName()); if (hasFamilyName()) { result = result && getFamilyName() .equals(other.getFamilyName()); } result = result && getCompactionInputList() .equals(other.getCompactionInputList()); result = result && getCompactionOutputList() .equals(other.getCompactionOutputList()); result = result && (hasStoreHomeDir() == other.hasStoreHomeDir()); if (hasStoreHomeDir()) { result = result && getStoreHomeDir() .equals(other.getStoreHomeDir()); } result = result && (hasRegionName() == other.hasRegionName()); if (hasRegionName()) { result = result && getRegionName() .equals(other.getRegionName()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasTableName()) { hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER; hash = (53 * hash) + getTableName().hashCode(); 
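// As in FamilyScope.hashCode() above, each present field folds in its field
// number (x37) and its value hash (x53), so two messages that differ in any
// set field are very likely to produce different hash codes.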
} if (hasEncodedRegionName()) { hash = (37 * hash) + ENCODED_REGION_NAME_FIELD_NUMBER; hash = (53 * hash) + getEncodedRegionName().hashCode(); } if (hasFamilyName()) { hash = (37 * hash) + FAMILY_NAME_FIELD_NUMBER; hash = (53 * hash) + getFamilyName().hashCode(); } if (getCompactionInputCount() > 0) { hash = (37 * hash) + COMPACTION_INPUT_FIELD_NUMBER; hash = (53 * hash) + getCompactionInputList().hashCode(); } if (getCompactionOutputCount() > 0) { hash = (37 * hash) + COMPACTION_OUTPUT_FIELD_NUMBER; hash = (53 * hash) + getCompactionOutputList().hashCode(); } if (hasStoreHomeDir()) { hash = (37 * hash) + STORE_HOME_DIR_FIELD_NUMBER; hash = (53 * hash) + getStoreHomeDir().hashCode(); } if (hasRegionName()) { hash = (37 * hash) + REGION_NAME_FIELD_NUMBER; hash = (53 * hash) + getRegionName().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, 
input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> ** * Special WAL entry to hold all related to a compaction. * Written to WAL before completing compaction. There is * sufficient info in the below message to complete later * the * compaction should we fail the WAL write. * </pre> * * Protobuf type {@code hbase.pb.CompactionDescriptor} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.CompactionDescriptor) org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptorOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_CompactionDescriptor_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_CompactionDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor.class, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); tableName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); encodedRegionName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); 
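// Each pair of statements in clear() restores a field's default value and
// drops its presence bit in bitField0_; the builder tracks set-ness
// separately from the stored value.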
familyName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000004); compactionInput_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000008); compactionOutput_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000010); storeHomeDir_ = ""; bitField0_ = (bitField0_ & ~0x00000020); regionName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000040); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_CompactionDescriptor_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor build() { org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor result = new org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.tableName_ = tableName_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.encodedRegionName_ = encodedRegionName_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.familyName_ = familyName_; if (((bitField0_ & 0x00000008) == 0x00000008)) { compactionInput_ = compactionInput_.getUnmodifiableView(); bitField0_ = (bitField0_ & ~0x00000008); } result.compactionInput_ = compactionInput_; if (((bitField0_ & 0x00000010) == 0x00000010)) { compactionOutput_ = compactionOutput_.getUnmodifiableView(); bitField0_ = (bitField0_ & ~0x00000010); } result.compactionOutput_ = compactionOutput_; if (((from_bitField0_ & 0x00000020) == 0x00000020)) { to_bitField0_ |= 0x00000008; } result.storeHomeDir_ = storeHomeDir_; if (((from_bitField0_ & 0x00000040) == 0x00000040)) { to_bitField0_ |= 0x00000010; } result.regionName_ = regionName_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public 
Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor.getDefaultInstance()) return this; if (other.hasTableName()) { setTableName(other.getTableName()); } if (other.hasEncodedRegionName()) { setEncodedRegionName(other.getEncodedRegionName()); } if (other.hasFamilyName()) { setFamilyName(other.getFamilyName()); } if (!other.compactionInput_.isEmpty()) { if (compactionInput_.isEmpty()) { compactionInput_ = other.compactionInput_; bitField0_ = (bitField0_ & ~0x00000008); } else { ensureCompactionInputIsMutable(); compactionInput_.addAll(other.compactionInput_); } onChanged(); } if (!other.compactionOutput_.isEmpty()) { if (compactionOutput_.isEmpty()) { compactionOutput_ = other.compactionOutput_; bitField0_ = (bitField0_ & ~0x00000010); } else { ensureCompactionOutputIsMutable(); compactionOutput_.addAll(other.compactionOutput_); } onChanged(); } if (other.hasStoreHomeDir()) { bitField0_ |= 0x00000020; storeHomeDir_ = other.storeHomeDir_; onChanged(); } if (other.hasRegionName()) { setRegionName(other.getRegionName()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasTableName()) { return false; } if (!hasEncodedRegionName()) { return false; } if (!hasFamilyName()) { return false; } if (!hasStoreHomeDir()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString tableName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <pre> * TODO: WALKey already stores these, might remove * </pre> * * <code>required bytes table_name = 1;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * TODO: WALKey already stores these, might remove * </pre> * * <code>required bytes table_name = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getTableName() { return tableName_; } /** * <pre> * TODO: WALKey already stores these, might remove * </pre> * * <code>required bytes table_name = 1;</code> */ public Builder 
setTableName(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; tableName_ = value; onChanged(); return this; } /** * <pre> * TODO: WALKey already stores these, might remove * </pre> * * <code>required bytes table_name = 1;</code> */ public Builder clearTableName() { bitField0_ = (bitField0_ & ~0x00000001); tableName_ = getDefaultInstance().getTableName(); onChanged(); return this; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString encodedRegionName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes encoded_region_name = 2;</code> */ public boolean hasEncodedRegionName() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required bytes encoded_region_name = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getEncodedRegionName() { return encodedRegionName_; } /** * <code>required bytes encoded_region_name = 2;</code> */ public Builder setEncodedRegionName(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; encodedRegionName_ = value; onChanged(); return this; } /** * <code>required bytes encoded_region_name = 2;</code> */ public Builder clearEncodedRegionName() { bitField0_ = (bitField0_ & ~0x00000002); encodedRegionName_ = getDefaultInstance().getEncodedRegionName(); onChanged(); return this; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString familyName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes family_name = 3;</code> */ public boolean hasFamilyName() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>required bytes family_name = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getFamilyName() { return familyName_; } /** * <code>required bytes family_name = 3;</code> */ public Builder setFamilyName(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; familyName_ = value; onChanged(); return this; } /** * <code>required bytes family_name = 3;</code> */ public Builder clearFamilyName() { bitField0_ = (bitField0_ & ~0x00000004); familyName_ = getDefaultInstance().getFamilyName(); onChanged(); return this; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringList compactionInput_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY; private void ensureCompactionInputIsMutable() { if (!((bitField0_ & 0x00000008) == 0x00000008)) { compactionInput_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList(compactionInput_); bitField0_ |= 0x00000008; } } /** * <pre> * relative to store dir * </pre> * * <code>repeated string compaction_input = 4;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolStringList getCompactionInputList() { return compactionInput_.getUnmodifiableView(); } /** * <pre> * relative to store dir * </pre> * * <code>repeated string compaction_input = 4;</code> */ public int getCompactionInputCount() { return compactionInput_.size(); } /** * <pre> * relative to store dir * </pre> * * <code>repeated string compaction_input = 4;</code> */ public java.lang.String getCompactionInput(int index) { return compactionInput_.get(index); } /** * <pre> * relative to 
store dir * </pre> * * <code>repeated string compaction_input = 4;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getCompactionInputBytes(int index) { return compactionInput_.getByteString(index); } /** * <pre> * relative to store dir * </pre> * * <code>repeated string compaction_input = 4;</code> */ public Builder setCompactionInput( int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureCompactionInputIsMutable(); compactionInput_.set(index, value); onChanged(); return this; } /** * <pre> * relative to store dir * </pre> * * <code>repeated string compaction_input = 4;</code> */ public Builder addCompactionInput( java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureCompactionInputIsMutable(); compactionInput_.add(value); onChanged(); return this; } /** * <pre> * relative to store dir * </pre> * * <code>repeated string compaction_input = 4;</code> */ public Builder addAllCompactionInput( java.lang.Iterable<java.lang.String> values) { ensureCompactionInputIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, compactionInput_); onChanged(); return this; } /** * <pre> * relative to store dir * </pre> * * <code>repeated string compaction_input = 4;</code> */ public Builder clearCompactionInput() { compactionInput_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * <pre> * relative to store dir * </pre> * * <code>repeated string compaction_input = 4;</code> */ public Builder addCompactionInputBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } ensureCompactionInputIsMutable(); compactionInput_.add(value); onChanged(); return this; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringList compactionOutput_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY; private void ensureCompactionOutputIsMutable() { if (!((bitField0_ & 0x00000010) == 0x00000010)) { compactionOutput_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList(compactionOutput_); bitField0_ |= 0x00000010; } } /** * <code>repeated string compaction_output = 5;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolStringList getCompactionOutputList() { return compactionOutput_.getUnmodifiableView(); } /** * <code>repeated string compaction_output = 5;</code> */ public int getCompactionOutputCount() { return compactionOutput_.size(); } /** * <code>repeated string compaction_output = 5;</code> */ public java.lang.String getCompactionOutput(int index) { return compactionOutput_.get(index); } /** * <code>repeated string compaction_output = 5;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getCompactionOutputBytes(int index) { return compactionOutput_.getByteString(index); } /** * <code>repeated string compaction_output = 5;</code> */ public Builder setCompactionOutput( int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureCompactionOutputIsMutable(); compactionOutput_.set(index, value); onChanged(); return this; } /** * <code>repeated string compaction_output = 5;</code> */ public Builder addCompactionOutput( java.lang.String value) { if (value == null) { throw new NullPointerException(); } 
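// ensureCompactionOutputIsMutable() (defined above) copies the list on the
// first mutation after a build(), so previously built messages keep their
// unmodifiable view while this builder edits a private copy.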
ensureCompactionOutputIsMutable(); compactionOutput_.add(value); onChanged(); return this; } /** * <code>repeated string compaction_output = 5;</code> */ public Builder addAllCompactionOutput( java.lang.Iterable<java.lang.String> values) { ensureCompactionOutputIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, compactionOutput_); onChanged(); return this; } /** * <code>repeated string compaction_output = 5;</code> */ public Builder clearCompactionOutput() { compactionOutput_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000010); onChanged(); return this; } /** * <code>repeated string compaction_output = 5;</code> */ public Builder addCompactionOutputBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } ensureCompactionOutputIsMutable(); compactionOutput_.add(value); onChanged(); return this; } private java.lang.Object storeHomeDir_ = ""; /** * <pre> * relative to region dir * </pre> * * <code>required string store_home_dir = 6;</code> */ public boolean hasStoreHomeDir() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <pre> * relative to region dir * </pre> * * <code>required string store_home_dir = 6;</code> */ public java.lang.String getStoreHomeDir() { java.lang.Object ref = storeHomeDir_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { storeHomeDir_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> * relative to region dir * </pre> * * <code>required string store_home_dir = 6;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getStoreHomeDirBytes() { java.lang.Object ref = storeHomeDir_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); storeHomeDir_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <pre> * relative to region dir * </pre> * * <code>required string store_home_dir = 6;</code> */ public Builder setStoreHomeDir( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000020; storeHomeDir_ = value; onChanged(); return this; } /** * <pre> * relative to region dir * </pre> * * <code>required string store_home_dir = 6;</code> */ public Builder clearStoreHomeDir() { bitField0_ = (bitField0_ & ~0x00000020); storeHomeDir_ = getDefaultInstance().getStoreHomeDir(); onChanged(); return this; } /** * <pre> * relative to region dir * </pre> * * <code>required string store_home_dir = 6;</code> */ public Builder setStoreHomeDirBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000020; storeHomeDir_ = value; onChanged(); return this; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString regionName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <pre> * full region name * </pre> * * <code>optional bytes region_name = 7;</code> */ public boolean hasRegionName() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** * <pre> * full region 
name * </pre> * * <code>optional bytes region_name = 7;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getRegionName() { return regionName_; } /** * <pre> * full region name * </pre> * * <code>optional bytes region_name = 7;</code> */ public Builder setRegionName(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000040; regionName_ = value; onChanged(); return this; } /** * <pre> * full region name * </pre> * * <code>optional bytes region_name = 7;</code> */ public Builder clearRegionName() { bitField0_ = (bitField0_ & ~0x00000040); regionName_ = getDefaultInstance().getRegionName(); onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.CompactionDescriptor) } // @@protoc_insertion_point(class_scope:hbase.pb.CompactionDescriptor) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<CompactionDescriptor> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<CompactionDescriptor>() { public CompactionDescriptor parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new CompactionDescriptor(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<CompactionDescriptor> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<CompactionDescriptor> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface FlushDescriptorOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.FlushDescriptor) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>required .hbase.pb.FlushDescriptor.FlushAction action = 1;</code> */ boolean hasAction(); /** * <code>required .hbase.pb.FlushDescriptor.FlushAction action = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.FlushAction getAction(); /** * <code>required bytes table_name = 2;</code> */ boolean hasTableName(); /** * <code>required bytes table_name = 2;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getTableName(); /** * <code>required bytes encoded_region_name = 3;</code> */ boolean hasEncodedRegionName(); /** * <code>required bytes encoded_region_name = 3;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString 
getEncodedRegionName(); /** * <code>optional uint64 flush_sequence_number = 4;</code> */ boolean hasFlushSequenceNumber(); /** * <code>optional uint64 flush_sequence_number = 4;</code> */ long getFlushSequenceNumber(); /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor> getStoreFlushesList(); /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor getStoreFlushes(int index); /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ int getStoreFlushesCount(); /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder> getStoreFlushesOrBuilderList(); /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder getStoreFlushesOrBuilder( int index); /** * <pre> * full region name * </pre> * * <code>optional bytes region_name = 6;</code> */ boolean hasRegionName(); /** * <pre> * full region name * </pre> * * <code>optional bytes region_name = 6;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getRegionName(); } /** * <pre> ** * Special WAL entry to hold all related to a flush. * </pre> * * Protobuf type {@code hbase.pb.FlushDescriptor} */ public static final class FlushDescriptor extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.FlushDescriptor) FlushDescriptorOrBuilder { // Use FlushDescriptor.newBuilder() to construct. 
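// Illustrative usage sketch (a comment, not protoc output), assuming the
// standard generated setters that parallel FamilyScope and
// CompactionDescriptor above; the literal values are hypothetical.
//
//   FlushDescriptor fd = FlushDescriptor.newBuilder()
//       .setAction(FlushDescriptor.FlushAction.START_FLUSH)       // required
//       .setTableName(ByteString.copyFromUtf8("TestTable"))       // required
//       .setEncodedRegionName(ByteString.copyFromUtf8("abc123"))  // required
//       .setFlushSequenceNumber(42L)                              // optional
//       .build();
//
// Note in the parser below (tag case 8) that an unrecognized FlushAction
// number read off the wire is preserved in unknownFields rather than dropped.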
private FlushDescriptor(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private FlushDescriptor() { action_ = 0; tableName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; encodedRegionName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; flushSequenceNumber_ = 0L; storeFlushes_ = java.util.Collections.emptyList(); regionName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private FlushDescriptor( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { int rawValue = input.readEnum(); org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.FlushAction value = org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.FlushAction.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(1, rawValue); } else { bitField0_ |= 0x00000001; action_ = rawValue; } break; } case 18: { bitField0_ |= 0x00000002; tableName_ = input.readBytes(); break; } case 26: { bitField0_ |= 0x00000004; encodedRegionName_ = input.readBytes(); break; } case 32: { bitField0_ |= 0x00000008; flushSequenceNumber_ = input.readUInt64(); break; } case 42: { if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) { storeFlushes_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor>(); mutable_bitField0_ |= 0x00000010; } storeFlushes_.add( input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.PARSER, extensionRegistry)); break; } case 50: { bitField0_ |= 0x00000010; regionName_ = input.readBytes(); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) { storeFlushes_ = java.util.Collections.unmodifiableList(storeFlushes_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_FlushDescriptor_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_FlushDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( 
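// Note on the parsing constructor above (added commentary, not generated):
// each case label is a precomputed wire tag, tag = (field_number << 3) | wire_type.
// So case 8 is field 1 as a varint (the enum), 18 and 26 are fields 2 and 3 as
// length-delimited bytes, 32 is field 4 as a varint (uint64), 42 is field 5 as
// an embedded message, and 50 is field 6 as bytes. An enum number with no
// matching FlushAction is preserved via unknownFields.mergeVarintField(1, rawValue)
// rather than being dropped.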
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.class, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.Builder.class); } /** * Protobuf enum {@code hbase.pb.FlushDescriptor.FlushAction} */ public enum FlushAction implements org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolMessageEnum { /** * <code>START_FLUSH = 0;</code> */ START_FLUSH(0), /** * <code>COMMIT_FLUSH = 1;</code> */ COMMIT_FLUSH(1), /** * <code>ABORT_FLUSH = 2;</code> */ ABORT_FLUSH(2), /** * <pre> * marker for indicating that a flush has been requested but cannot complete * </pre> * * <code>CANNOT_FLUSH = 3;</code> */ CANNOT_FLUSH(3), ; /** * <code>START_FLUSH = 0;</code> */ public static final int START_FLUSH_VALUE = 0; /** * <code>COMMIT_FLUSH = 1;</code> */ public static final int COMMIT_FLUSH_VALUE = 1; /** * <code>ABORT_FLUSH = 2;</code> */ public static final int ABORT_FLUSH_VALUE = 2; /** * <pre> * marker for indicating that a flush has been requested but cannot complete * </pre> * * <code>CANNOT_FLUSH = 3;</code> */ public static final int CANNOT_FLUSH_VALUE = 3; public final int getNumber() { return value; } /** * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static FlushAction valueOf(int value) { return forNumber(value); } public static FlushAction forNumber(int value) { switch (value) { case 0: return START_FLUSH; case 1: return COMMIT_FLUSH; case 2: return ABORT_FLUSH; case 3: return CANNOT_FLUSH; default: return null; } } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<FlushAction> internalGetValueMap() { return internalValueMap; } private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap< FlushAction> internalValueMap = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<FlushAction>() { public FlushAction findValueByNumber(int number) { return FlushAction.forNumber(number); } }; public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); } public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.getDescriptor().getEnumTypes().get(0); } private static final FlushAction[] VALUES = values(); public static FlushAction valueOf( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } return VALUES[desc.getIndex()]; } private final int value; private FlushAction(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:hbase.pb.FlushDescriptor.FlushAction) } public interface StoreFlushDescriptorOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.FlushDescriptor.StoreFlushDescriptor) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>required bytes family_name = 1;</code> */ boolean hasFamilyName(); /** * <code>required bytes family_name = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getFamilyName(); /** * <pre> *relative to region dir * </pre> * * <code>required string 
store_home_dir = 2;</code> */ boolean hasStoreHomeDir(); /** * <pre> *relative to region dir * </pre> * * <code>required string store_home_dir = 2;</code> */ java.lang.String getStoreHomeDir(); /** * <pre> *relative to region dir * </pre> * * <code>required string store_home_dir = 2;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getStoreHomeDirBytes(); /** * <pre> * relative to store dir (if this is a COMMIT_FLUSH) * </pre> * * <code>repeated string flush_output = 3;</code> */ java.util.List<java.lang.String> getFlushOutputList(); /** * <pre> * relative to store dir (if this is a COMMIT_FLUSH) * </pre> * * <code>repeated string flush_output = 3;</code> */ int getFlushOutputCount(); /** * <pre> * relative to store dir (if this is a COMMIT_FLUSH) * </pre> * * <code>repeated string flush_output = 3;</code> */ java.lang.String getFlushOutput(int index); /** * <pre> * relative to store dir (if this is a COMMIT_FLUSH) * </pre> * * <code>repeated string flush_output = 3;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getFlushOutputBytes(int index); } /** * Protobuf type {@code hbase.pb.FlushDescriptor.StoreFlushDescriptor} */ public static final class StoreFlushDescriptor extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.FlushDescriptor.StoreFlushDescriptor) StoreFlushDescriptorOrBuilder { // Use StoreFlushDescriptor.newBuilder() to construct. private StoreFlushDescriptor(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private StoreFlushDescriptor() { familyName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; storeHomeDir_ = ""; flushOutput_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private StoreFlushDescriptor( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; familyName_ = input.readBytes(); break; } case 18: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; storeHomeDir_ = bs; break; } case 26: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { flushOutput_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList(); mutable_bitField0_ |= 0x00000004; } flushOutput_.add(bs); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( 
e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { flushOutput_ = flushOutput_.getUnmodifiableView(); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.class, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder.class); } private int bitField0_; public static final int FAMILY_NAME_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString familyName_; /** * <code>required bytes family_name = 1;</code> */ public boolean hasFamilyName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes family_name = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getFamilyName() { return familyName_; } public static final int STORE_HOME_DIR_FIELD_NUMBER = 2; private volatile java.lang.Object storeHomeDir_; /** * <pre> *relative to region dir * </pre> * * <code>required string store_home_dir = 2;</code> */ public boolean hasStoreHomeDir() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <pre> *relative to region dir * </pre> * * <code>required string store_home_dir = 2;</code> */ public java.lang.String getStoreHomeDir() { java.lang.Object ref = storeHomeDir_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { storeHomeDir_ = s; } return s; } } /** * <pre> *relative to region dir * </pre> * * <code>required string store_home_dir = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getStoreHomeDirBytes() { java.lang.Object ref = storeHomeDir_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); storeHomeDir_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int FLUSH_OUTPUT_FIELD_NUMBER = 3; private org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringList flushOutput_; /** * <pre> * relative to store dir (if this is a COMMIT_FLUSH) * </pre> * * <code>repeated string flush_output = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolStringList getFlushOutputList() { return flushOutput_; } /** * <pre> * relative to store dir (if this is a COMMIT_FLUSH) * </pre> * * <code>repeated string flush_output = 3;</code> */ public int getFlushOutputCount() { return flushOutput_.size(); } /** * <pre> * relative to store dir (if this is a COMMIT_FLUSH) * </pre> * * <code>repeated string flush_output = 3;</code> */ public 
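// Added commentary (not generated): getStoreHomeDir() above decodes the wire
// ByteString lazily and, only when the bytes are valid UTF-8, caches the
// decoded java.lang.String back into storeHomeDir_, so repeated calls avoid
// re-decoding; getStoreHomeDirBytes() performs the reverse conversion and
// caches the ByteString the same way.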
java.lang.String getFlushOutput(int index) { return flushOutput_.get(index); } /** * <pre> * relative to store dir (if this is a COMMIT_FLUSH) * </pre> * * <code>repeated string flush_output = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getFlushOutputBytes(int index) { return flushOutput_.getByteString(index); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasFamilyName()) { memoizedIsInitialized = 0; return false; } if (!hasStoreHomeDir()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, familyName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 2, storeHomeDir_); } for (int i = 0; i < flushOutput_.size(); i++) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 3, flushOutput_.getRaw(i)); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(1, familyName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(2, storeHomeDir_); } { int dataSize = 0; for (int i = 0; i < flushOutput_.size(); i++) { dataSize += computeStringSizeNoTag(flushOutput_.getRaw(i)); } size += dataSize; size += 1 * getFlushOutputList().size(); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor other = (org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor) obj; boolean result = true; result = result && (hasFamilyName() == other.hasFamilyName()); if (hasFamilyName()) { result = result && getFamilyName() .equals(other.getFamilyName()); } result = result && (hasStoreHomeDir() == other.hasStoreHomeDir()); if (hasStoreHomeDir()) { result = result && getStoreHomeDir() .equals(other.getStoreHomeDir()); } result = result && getFlushOutputList() .equals(other.getFlushOutputList()); result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasFamilyName()) { hash = (37 * hash) + FAMILY_NAME_FIELD_NUMBER; hash = (53 * hash) + getFamilyName().hashCode(); } if (hasStoreHomeDir()) { hash = (37 * hash) + STORE_HOME_DIR_FIELD_NUMBER; hash = (53 * hash) + getStoreHomeDir().hashCode(); } if (getFlushOutputCount() > 0) { hash = (37 * hash) + FLUSH_OUTPUT_FIELD_NUMBER; hash = (53 * hash) + 
getFlushOutputList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite 
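// Illustrative round trip (added commentary, not generated code; "sfd" is a
// hypothetical already-built message):
//
//   byte[] wire = sfd.toByteArray();  // inherited from AbstractMessageLite
//   FlushDescriptor.StoreFlushDescriptor copy =
//       FlushDescriptor.StoreFlushDescriptor.parseFrom(wire);
//   assert copy.equals(sfd);
//
// The overloads above differ only in input source; parseDelimitedFrom
// additionally expects a leading varint length prefix, as written by
// writeDelimitedTo.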
extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.FlushDescriptor.StoreFlushDescriptor} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.FlushDescriptor.StoreFlushDescriptor) org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.class, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); familyName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); storeHomeDir_ = ""; bitField0_ = (bitField0_ & ~0x00000002); flushOutput_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000004); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor build() { 
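// Added commentary (not generated): build() delegates to buildPartial() and
// then enforces the proto2 required-field contract; if family_name or
// store_home_dir is unset, newUninitializedMessageException(result) is thrown.
// buildPartial() alone never throws, which is why internal merge paths use it.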
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor result = new org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.familyName_ = familyName_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.storeHomeDir_ = storeHomeDir_; if (((bitField0_ & 0x00000004) == 0x00000004)) { flushOutput_ = flushOutput_.getUnmodifiableView(); bitField0_ = (bitField0_ & ~0x00000004); } result.flushOutput_ = flushOutput_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.getDefaultInstance()) return this; if (other.hasFamilyName()) { setFamilyName(other.getFamilyName()); } if (other.hasStoreHomeDir()) { bitField0_ |= 0x00000002; storeHomeDir_ = other.storeHomeDir_; onChanged(); } if (!other.flushOutput_.isEmpty()) { if (flushOutput_.isEmpty()) { flushOutput_ = other.flushOutput_; bitField0_ = (bitField0_ & ~0x00000004); } else { ensureFlushOutputIsMutable(); flushOutput_.addAll(other.flushOutput_); } onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasFamilyName()) { return false; } if (!hasStoreHomeDir()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
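// Added commentary (not generated): in mergeFrom(StoreFlushDescriptor) above,
// singular fields (family_name, store_home_dir) are overwritten when set on
// the other message, the repeated flush_output list is appended to, and
// unknown fields are merged; this matches standard protobuf merge semantics.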
java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString familyName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes family_name = 1;</code> */ public boolean hasFamilyName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes family_name = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getFamilyName() { return familyName_; } /** * <code>required bytes family_name = 1;</code> */ public Builder setFamilyName(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; familyName_ = value; onChanged(); return this; } /** * <code>required bytes family_name = 1;</code> */ public Builder clearFamilyName() { bitField0_ = (bitField0_ & ~0x00000001); familyName_ = getDefaultInstance().getFamilyName(); onChanged(); return this; } private java.lang.Object storeHomeDir_ = ""; /** * <pre> *relative to region dir * </pre> * * <code>required string store_home_dir = 2;</code> */ public boolean hasStoreHomeDir() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <pre> *relative to region dir * </pre> * * <code>required string store_home_dir = 2;</code> */ public java.lang.String getStoreHomeDir() { java.lang.Object ref = storeHomeDir_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { storeHomeDir_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> *relative to region dir * </pre> * * <code>required string store_home_dir = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getStoreHomeDirBytes() { java.lang.Object ref = storeHomeDir_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); storeHomeDir_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <pre> *relative to region dir * </pre> * * <code>required string store_home_dir = 2;</code> */ public Builder setStoreHomeDir( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; storeHomeDir_ = value; onChanged(); return this; } /** * <pre> *relative to region dir * </pre> * * <code>required string store_home_dir = 2;</code> */ public Builder clearStoreHomeDir() { bitField0_ = (bitField0_ & ~0x00000002); storeHomeDir_ = getDefaultInstance().getStoreHomeDir(); onChanged(); return this; } /** * <pre> *relative to region dir * </pre> * * <code>required string store_home_dir = 2;</code> */ public Builder setStoreHomeDirBytes( 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; storeHomeDir_ = value; onChanged(); return this; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringList flushOutput_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY; private void ensureFlushOutputIsMutable() { if (!((bitField0_ & 0x00000004) == 0x00000004)) { flushOutput_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList(flushOutput_); bitField0_ |= 0x00000004; } } /** * <pre> * relative to store dir (if this is a COMMIT_FLUSH) * </pre> * * <code>repeated string flush_output = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolStringList getFlushOutputList() { return flushOutput_.getUnmodifiableView(); } /** * <pre> * relative to store dir (if this is a COMMIT_FLUSH) * </pre> * * <code>repeated string flush_output = 3;</code> */ public int getFlushOutputCount() { return flushOutput_.size(); } /** * <pre> * relative to store dir (if this is a COMMIT_FLUSH) * </pre> * * <code>repeated string flush_output = 3;</code> */ public java.lang.String getFlushOutput(int index) { return flushOutput_.get(index); } /** * <pre> * relative to store dir (if this is a COMMIT_FLUSH) * </pre> * * <code>repeated string flush_output = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getFlushOutputBytes(int index) { return flushOutput_.getByteString(index); } /** * <pre> * relative to store dir (if this is a COMMIT_FLUSH) * </pre> * * <code>repeated string flush_output = 3;</code> */ public Builder setFlushOutput( int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureFlushOutputIsMutable(); flushOutput_.set(index, value); onChanged(); return this; } /** * <pre> * relative to store dir (if this is a COMMIT_FLUSH) * </pre> * * <code>repeated string flush_output = 3;</code> */ public Builder addFlushOutput( java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureFlushOutputIsMutable(); flushOutput_.add(value); onChanged(); return this; } /** * <pre> * relative to store dir (if this is a COMMIT_FLUSH) * </pre> * * <code>repeated string flush_output = 3;</code> */ public Builder addAllFlushOutput( java.lang.Iterable<java.lang.String> values) { ensureFlushOutputIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, flushOutput_); onChanged(); return this; } /** * <pre> * relative to store dir (if this is a COMMIT_FLUSH) * </pre> * * <code>repeated string flush_output = 3;</code> */ public Builder clearFlushOutput() { flushOutput_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * <pre> * relative to store dir (if this is a COMMIT_FLUSH) * </pre> * * <code>repeated string flush_output = 3;</code> */ public Builder addFlushOutputBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } ensureFlushOutputIsMutable(); flushOutput_.add(value); onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final 
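// Illustrative use of the repeated flush_output accessors above (added
// commentary, not generated code; the string values are hypothetical and the
// builder is assumed to start empty):
//
//   builder.addFlushOutput("f1")                                  // append one
//          .addAllFlushOutput(java.util.Arrays.asList("f2", "f3"));
//   builder.getFlushOutputCount();                                // -> 3
//   builder.clearFlushOutput();                                   // back to empty
//
// Elements live in a LazyStringArrayList, so each entry can remain a
// ByteString until a String is first requested.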
org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.FlushDescriptor.StoreFlushDescriptor) } // @@protoc_insertion_point(class_scope:hbase.pb.FlushDescriptor.StoreFlushDescriptor) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<StoreFlushDescriptor> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<StoreFlushDescriptor>() { public StoreFlushDescriptor parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new StoreFlushDescriptor(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<StoreFlushDescriptor> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<StoreFlushDescriptor> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } private int bitField0_; public static final int ACTION_FIELD_NUMBER = 1; private int action_; /** * <code>required .hbase.pb.FlushDescriptor.FlushAction action = 1;</code> */ public boolean hasAction() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.FlushDescriptor.FlushAction action = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.FlushAction getAction() { org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.FlushAction result = org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.FlushAction.valueOf(action_); return result == null ? 
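// Added commentary (not generated): action_ is stored as a raw int, so a
// number written by a newer schema may not map to any FlushAction; valueOf
// (an alias of forNumber) then returns null and the getter falls back to the
// first enum value, START_FLUSH, immediately below.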
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.FlushAction.START_FLUSH : result; } public static final int TABLE_NAME_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString tableName_; /** * <code>required bytes table_name = 2;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required bytes table_name = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getTableName() { return tableName_; } public static final int ENCODED_REGION_NAME_FIELD_NUMBER = 3; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString encodedRegionName_; /** * <code>required bytes encoded_region_name = 3;</code> */ public boolean hasEncodedRegionName() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>required bytes encoded_region_name = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getEncodedRegionName() { return encodedRegionName_; } public static final int FLUSH_SEQUENCE_NUMBER_FIELD_NUMBER = 4; private long flushSequenceNumber_; /** * <code>optional uint64 flush_sequence_number = 4;</code> */ public boolean hasFlushSequenceNumber() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional uint64 flush_sequence_number = 4;</code> */ public long getFlushSequenceNumber() { return flushSequenceNumber_; } public static final int STORE_FLUSHES_FIELD_NUMBER = 5; private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor> storeFlushes_; /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor> getStoreFlushesList() { return storeFlushes_; } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder> getStoreFlushesOrBuilderList() { return storeFlushes_; } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public int getStoreFlushesCount() { return storeFlushes_.size(); } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor getStoreFlushes(int index) { return storeFlushes_.get(index); } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder getStoreFlushesOrBuilder( int index) { return storeFlushes_.get(index); } public static final int REGION_NAME_FIELD_NUMBER = 6; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString regionName_; /** * <pre> * full region name * </pre> * * <code>optional bytes region_name = 6;</code> */ public boolean hasRegionName() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <pre> * full region name * </pre> * * <code>optional bytes region_name = 6;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getRegionName() { return regionName_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasAction()) { memoizedIsInitialized = 0; return false; } if (!hasTableName()) { memoizedIsInitialized = 0; return false; } if (!hasEncodedRegionName()) { memoizedIsInitialized = 0; return false; } for (int i = 0; i < getStoreFlushesCount(); i++) { if (!getStoreFlushes(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeEnum(1, action_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, tableName_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBytes(3, encodedRegionName_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeUInt64(4, flushSequenceNumber_); } for (int i = 0; i < storeFlushes_.size(); i++) { output.writeMessage(5, storeFlushes_.get(i)); } if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeBytes(6, regionName_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeEnumSize(1, action_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(2, tableName_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(3, encodedRegionName_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt64Size(4, flushSequenceNumber_); } for (int i = 0; i < storeFlushes_.size(); i++) { size += 
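// Added commentary (not generated): writeTo(...) above emits fields in
// ascending field-number order (1..6), guarding each optional/required field
// with its bitField0_ presence mask and streaming unknownFields last;
// getSerializedSize() mirrors that walk with the matching compute*Size calls
// and memoizes the total in memoizedSize.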
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(5, storeFlushes_.get(i)); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(6, regionName_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor other = (org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor) obj; boolean result = true; result = result && (hasAction() == other.hasAction()); if (hasAction()) { result = result && action_ == other.action_; } result = result && (hasTableName() == other.hasTableName()); if (hasTableName()) { result = result && getTableName() .equals(other.getTableName()); } result = result && (hasEncodedRegionName() == other.hasEncodedRegionName()); if (hasEncodedRegionName()) { result = result && getEncodedRegionName() .equals(other.getEncodedRegionName()); } result = result && (hasFlushSequenceNumber() == other.hasFlushSequenceNumber()); if (hasFlushSequenceNumber()) { result = result && (getFlushSequenceNumber() == other.getFlushSequenceNumber()); } result = result && getStoreFlushesList() .equals(other.getStoreFlushesList()); result = result && (hasRegionName() == other.hasRegionName()); if (hasRegionName()) { result = result && getRegionName() .equals(other.getRegionName()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasAction()) { hash = (37 * hash) + ACTION_FIELD_NUMBER; hash = (53 * hash) + action_; } if (hasTableName()) { hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER; hash = (53 * hash) + getTableName().hashCode(); } if (hasEncodedRegionName()) { hash = (37 * hash) + ENCODED_REGION_NAME_FIELD_NUMBER; hash = (53 * hash) + getEncodedRegionName().hashCode(); } if (hasFlushSequenceNumber()) { hash = (37 * hash) + FLUSH_SEQUENCE_NUMBER_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( getFlushSequenceNumber()); } if (getStoreFlushesCount() > 0) { hash = (37 * hash) + STORE_FLUSHES_FIELD_NUMBER; hash = (53 * hash) + getStoreFlushesList().hashCode(); } if (hasRegionName()) { hash = (37 * hash) + REGION_NAME_FIELD_NUMBER; hash = (53 * hash) + getRegionName().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> ** * Special WAL entry to hold all related to a flush. 
* </pre> * * Protobuf type {@code hbase.pb.FlushDescriptor} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.FlushDescriptor) org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptorOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_FlushDescriptor_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_FlushDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.class, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getStoreFlushesFieldBuilder(); } } public Builder clear() { super.clear(); action_ = 0; bitField0_ = (bitField0_ & ~0x00000001); tableName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); encodedRegionName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000004); flushSequenceNumber_ = 0L; bitField0_ = (bitField0_ & ~0x00000008); if (storeFlushesBuilder_ == null) { storeFlushes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000010); } else { storeFlushesBuilder_.clear(); } regionName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000020); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_FlushDescriptor_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor build() { org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor result = new org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.action_ = action_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ 
|= 0x00000002; } result.tableName_ = tableName_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.encodedRegionName_ = encodedRegionName_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; } result.flushSequenceNumber_ = flushSequenceNumber_; if (storeFlushesBuilder_ == null) { if (((bitField0_ & 0x00000010) == 0x00000010)) { storeFlushes_ = java.util.Collections.unmodifiableList(storeFlushes_); bitField0_ = (bitField0_ & ~0x00000010); } result.storeFlushes_ = storeFlushes_; } else { result.storeFlushes_ = storeFlushesBuilder_.build(); } if (((from_bitField0_ & 0x00000020) == 0x00000020)) { to_bitField0_ |= 0x00000010; } result.regionName_ = regionName_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.getDefaultInstance()) return this; if (other.hasAction()) { setAction(other.getAction()); } if (other.hasTableName()) { setTableName(other.getTableName()); } if (other.hasEncodedRegionName()) { setEncodedRegionName(other.getEncodedRegionName()); } if (other.hasFlushSequenceNumber()) { setFlushSequenceNumber(other.getFlushSequenceNumber()); } if (storeFlushesBuilder_ == null) { if (!other.storeFlushes_.isEmpty()) { if (storeFlushes_.isEmpty()) { storeFlushes_ = other.storeFlushes_; bitField0_ = (bitField0_ & ~0x00000010); } else { ensureStoreFlushesIsMutable(); storeFlushes_.addAll(other.storeFlushes_); } onChanged(); } } else { if (!other.storeFlushes_.isEmpty()) { if (storeFlushesBuilder_.isEmpty()) { storeFlushesBuilder_.dispose(); storeFlushesBuilder_ = null; storeFlushes_ = other.storeFlushes_; bitField0_ = (bitField0_ & ~0x00000010); storeFlushesBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
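// Added commentary (not generated): the builder keeps store_flushes in one of
// two forms: a plain java.util.List while storeFlushesBuilder_ is null, or a
// repeated-field builder once sub-builders are requested. This merge path
// adopts the other message's immutable list directly when possible and only
// re-creates the field builder (just below) when alwaysUseFieldBuilders
// forces it. Note also in buildPartial() above that the builder's region_name
// bit (0x20) maps to message bit 0x10, because the repeated store_flushes
// field consumes builder bit 0x10 but needs no presence bit in the message.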
getStoreFlushesFieldBuilder() : null; } else { storeFlushesBuilder_.addAllMessages(other.storeFlushes_); } } } if (other.hasRegionName()) { setRegionName(other.getRegionName()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasAction()) { return false; } if (!hasTableName()) { return false; } if (!hasEncodedRegionName()) { return false; } for (int i = 0; i < getStoreFlushesCount(); i++) { if (!getStoreFlushes(i).isInitialized()) { return false; } } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private int action_ = 0; /** * <code>required .hbase.pb.FlushDescriptor.FlushAction action = 1;</code> */ public boolean hasAction() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.FlushDescriptor.FlushAction action = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.FlushAction getAction() { org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.FlushAction result = org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.FlushAction.valueOf(action_); return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.FlushAction.START_FLUSH : result; } /** * <code>required .hbase.pb.FlushDescriptor.FlushAction action = 1;</code> */ public Builder setAction(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.FlushAction value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; action_ = value.getNumber(); onChanged(); return this; } /** * <code>required .hbase.pb.FlushDescriptor.FlushAction action = 1;</code> */ public Builder clearAction() { bitField0_ = (bitField0_ & ~0x00000001); action_ = 0; onChanged(); return this; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString tableName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes table_name = 2;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required bytes table_name = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getTableName() { return tableName_; } /** * <code>required bytes table_name = 2;</code> */ public Builder setTableName(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; tableName_ = value; onChanged(); return this; } /** * <code>required bytes table_name = 2;</code> */ public Builder clearTableName() { bitField0_ = (bitField0_ & ~0x00000002); tableName_ = getDefaultInstance().getTableName(); onChanged(); return this; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString encodedRegionName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes encoded_region_name = 3;</code> */ public boolean hasEncodedRegionName() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>required bytes encoded_region_name = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getEncodedRegionName() { return encodedRegionName_; } /** * <code>required bytes encoded_region_name = 3;</code> */ public Builder setEncodedRegionName(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; encodedRegionName_ = value; onChanged(); return this; } /** * <code>required bytes encoded_region_name = 3;</code> */ public Builder clearEncodedRegionName() { bitField0_ = (bitField0_ & ~0x00000004); encodedRegionName_ = getDefaultInstance().getEncodedRegionName(); onChanged(); return this; } private long flushSequenceNumber_ ; /** * <code>optional uint64 flush_sequence_number = 4;</code> */ public boolean hasFlushSequenceNumber() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional uint64 flush_sequence_number = 4;</code> */ public long getFlushSequenceNumber() { return flushSequenceNumber_; } /** * <code>optional uint64 flush_sequence_number = 4;</code> */ public Builder setFlushSequenceNumber(long value) { bitField0_ |= 0x00000008; flushSequenceNumber_ = value; onChanged(); return this; } /** * <code>optional uint64 flush_sequence_number = 4;</code> */ public Builder clearFlushSequenceNumber() { bitField0_ = (bitField0_ & ~0x00000008); flushSequenceNumber_ = 0L; onChanged(); return this; } private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor> storeFlushes_ = java.util.Collections.emptyList(); private void 
ensureStoreFlushesIsMutable() { if (!((bitField0_ & 0x00000010) == 0x00000010)) { storeFlushes_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor>(storeFlushes_); bitField0_ |= 0x00000010; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder> storeFlushesBuilder_; /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor> getStoreFlushesList() { if (storeFlushesBuilder_ == null) { return java.util.Collections.unmodifiableList(storeFlushes_); } else { return storeFlushesBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public int getStoreFlushesCount() { if (storeFlushesBuilder_ == null) { return storeFlushes_.size(); } else { return storeFlushesBuilder_.getCount(); } } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor getStoreFlushes(int index) { if (storeFlushesBuilder_ == null) { return storeFlushes_.get(index); } else { return storeFlushesBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public Builder setStoreFlushes( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor value) { if (storeFlushesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureStoreFlushesIsMutable(); storeFlushes_.set(index, value); onChanged(); } else { storeFlushesBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public Builder setStoreFlushes( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder builderForValue) { if (storeFlushesBuilder_ == null) { ensureStoreFlushesIsMutable(); storeFlushes_.set(index, builderForValue.build()); onChanged(); } else { storeFlushesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public Builder addStoreFlushes(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor value) { if (storeFlushesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureStoreFlushesIsMutable(); storeFlushes_.add(value); onChanged(); } else { storeFlushesBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public Builder addStoreFlushes( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor value) { if (storeFlushesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureStoreFlushesIsMutable(); storeFlushes_.add(index, value); onChanged(); } else { 
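              // Once storeFlushesBuilder_ exists, all repeated-field mutations are
              // delegated to it and the local storeFlushes_ list is no longer the
              // authoritative copy (it is handed off in getStoreFlushesFieldBuilder()).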
storeFlushesBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public Builder addStoreFlushes( org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder builderForValue) { if (storeFlushesBuilder_ == null) { ensureStoreFlushesIsMutable(); storeFlushes_.add(builderForValue.build()); onChanged(); } else { storeFlushesBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public Builder addStoreFlushes( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder builderForValue) { if (storeFlushesBuilder_ == null) { ensureStoreFlushesIsMutable(); storeFlushes_.add(index, builderForValue.build()); onChanged(); } else { storeFlushesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public Builder addAllStoreFlushes( java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor> values) { if (storeFlushesBuilder_ == null) { ensureStoreFlushesIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, storeFlushes_); onChanged(); } else { storeFlushesBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public Builder clearStoreFlushes() { if (storeFlushesBuilder_ == null) { storeFlushes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000010); onChanged(); } else { storeFlushesBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public Builder removeStoreFlushes(int index) { if (storeFlushesBuilder_ == null) { ensureStoreFlushesIsMutable(); storeFlushes_.remove(index); onChanged(); } else { storeFlushesBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder getStoreFlushesBuilder( int index) { return getStoreFlushesFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder getStoreFlushesOrBuilder( int index) { if (storeFlushesBuilder_ == null) { return storeFlushes_.get(index); } else { return storeFlushesBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder> getStoreFlushesOrBuilderList() { if (storeFlushesBuilder_ != null) { return storeFlushesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(storeFlushes_); } } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder addStoreFlushesBuilder() { return getStoreFlushesFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.getDefaultInstance()); } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder addStoreFlushesBuilder( int index) { return getStoreFlushesFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.getDefaultInstance()); } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder> getStoreFlushesBuilderList() { return getStoreFlushesFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder> getStoreFlushesFieldBuilder() { if (storeFlushesBuilder_ == null) { storeFlushesBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder>( storeFlushes_, ((bitField0_ & 0x00000010) == 0x00000010), getParentForChildren(), isClean()); storeFlushes_ = null; } return storeFlushesBuilder_; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString regionName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <pre> * full region name * </pre> * * <code>optional bytes region_name = 6;</code> */ public boolean hasRegionName() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <pre> * full region name * </pre> * * <code>optional bytes region_name = 6;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getRegionName() { return regionName_; } /** * <pre> * full region name * </pre> * * <code>optional bytes region_name = 6;</code> */ public Builder setRegionName(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000020; regionName_ = value; onChanged(); return this; } /** * <pre> * full region name * </pre> * * <code>optional bytes region_name = 6;</code> */ public Builder clearRegionName() { bitField0_ = (bitField0_ & ~0x00000020); regionName_ = 
getDefaultInstance().getRegionName(); onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.FlushDescriptor) } // @@protoc_insertion_point(class_scope:hbase.pb.FlushDescriptor) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FlushDescriptor> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<FlushDescriptor>() { public FlushDescriptor parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new FlushDescriptor(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FlushDescriptor> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FlushDescriptor> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface StoreDescriptorOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.StoreDescriptor) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>required bytes family_name = 1;</code> */ boolean hasFamilyName(); /** * <code>required bytes family_name = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getFamilyName(); /** * <pre> *relative to region dir * </pre> * * <code>required string store_home_dir = 2;</code> */ boolean hasStoreHomeDir(); /** * <pre> *relative to region dir * </pre> * * <code>required string store_home_dir = 2;</code> */ java.lang.String getStoreHomeDir(); /** * <pre> *relative to region dir * </pre> * * <code>required string store_home_dir = 2;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getStoreHomeDirBytes(); /** * <pre> * relative to store dir * </pre> * * <code>repeated string store_file = 3;</code> */ java.util.List<java.lang.String> getStoreFileList(); /** * <pre> * relative to store dir * </pre> * * <code>repeated string store_file = 3;</code> */ int getStoreFileCount(); /** * <pre> * relative to store dir * </pre> * * <code>repeated string store_file = 3;</code> */ java.lang.String getStoreFile(int index); /** * <pre> * relative to store dir * </pre> * * <code>repeated string store_file = 3;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getStoreFileBytes(int index); /** * <pre> * size of store file * </pre> * * <code>optional uint64 store_file_size_bytes = 4;</code> */ boolean hasStoreFileSizeBytes(); /** * <pre> * size of store file * </pre> * * 
<code>optional uint64 store_file_size_bytes = 4;</code> */ long getStoreFileSizeBytes(); } /** * Protobuf type {@code hbase.pb.StoreDescriptor} */ public static final class StoreDescriptor extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.StoreDescriptor) StoreDescriptorOrBuilder { // Use StoreDescriptor.newBuilder() to construct. private StoreDescriptor(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private StoreDescriptor() { familyName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; storeHomeDir_ = ""; storeFile_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY; storeFileSizeBytes_ = 0L; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private StoreDescriptor( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; familyName_ = input.readBytes(); break; } case 18: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; storeHomeDir_ = bs; break; } case 26: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { storeFile_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList(); mutable_bitField0_ |= 0x00000004; } storeFile_.add(bs); break; } case 32: { bitField0_ |= 0x00000004; storeFileSizeBytes_ = input.readUInt64(); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { storeFile_ = storeFile_.getUnmodifiableView(); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_StoreDescriptor_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_StoreDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.class, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.Builder.class); } private int bitField0_; public static final int 
FAMILY_NAME_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString familyName_; /** * <code>required bytes family_name = 1;</code> */ public boolean hasFamilyName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes family_name = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getFamilyName() { return familyName_; } public static final int STORE_HOME_DIR_FIELD_NUMBER = 2; private volatile java.lang.Object storeHomeDir_; /** * <pre> *relative to region dir * </pre> * * <code>required string store_home_dir = 2;</code> */ public boolean hasStoreHomeDir() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <pre> *relative to region dir * </pre> * * <code>required string store_home_dir = 2;</code> */ public java.lang.String getStoreHomeDir() { java.lang.Object ref = storeHomeDir_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { storeHomeDir_ = s; } return s; } } /** * <pre> *relative to region dir * </pre> * * <code>required string store_home_dir = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getStoreHomeDirBytes() { java.lang.Object ref = storeHomeDir_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); storeHomeDir_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int STORE_FILE_FIELD_NUMBER = 3; private org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringList storeFile_; /** * <pre> * relative to store dir * </pre> * * <code>repeated string store_file = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolStringList getStoreFileList() { return storeFile_; } /** * <pre> * relative to store dir * </pre> * * <code>repeated string store_file = 3;</code> */ public int getStoreFileCount() { return storeFile_.size(); } /** * <pre> * relative to store dir * </pre> * * <code>repeated string store_file = 3;</code> */ public java.lang.String getStoreFile(int index) { return storeFile_.get(index); } /** * <pre> * relative to store dir * </pre> * * <code>repeated string store_file = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getStoreFileBytes(int index) { return storeFile_.getByteString(index); } public static final int STORE_FILE_SIZE_BYTES_FIELD_NUMBER = 4; private long storeFileSizeBytes_; /** * <pre> * size of store file * </pre> * * <code>optional uint64 store_file_size_bytes = 4;</code> */ public boolean hasStoreFileSizeBytes() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <pre> * size of store file * </pre> * * <code>optional uint64 store_file_size_bytes = 4;</code> */ public long getStoreFileSizeBytes() { return storeFileSizeBytes_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasFamilyName()) { memoizedIsInitialized = 0; return false; } if (!hasStoreHomeDir()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void 
writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, familyName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 2, storeHomeDir_); } for (int i = 0; i < storeFile_.size(); i++) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 3, storeFile_.getRaw(i)); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeUInt64(4, storeFileSizeBytes_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(1, familyName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(2, storeHomeDir_); } { int dataSize = 0; for (int i = 0; i < storeFile_.size(); i++) { dataSize += computeStringSizeNoTag(storeFile_.getRaw(i)); } size += dataSize; size += 1 * getStoreFileList().size(); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt64Size(4, storeFileSizeBytes_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor other = (org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor) obj; boolean result = true; result = result && (hasFamilyName() == other.hasFamilyName()); if (hasFamilyName()) { result = result && getFamilyName() .equals(other.getFamilyName()); } result = result && (hasStoreHomeDir() == other.hasStoreHomeDir()); if (hasStoreHomeDir()) { result = result && getStoreHomeDir() .equals(other.getStoreHomeDir()); } result = result && getStoreFileList() .equals(other.getStoreFileList()); result = result && (hasStoreFileSizeBytes() == other.hasStoreFileSizeBytes()); if (hasStoreFileSizeBytes()) { result = result && (getStoreFileSizeBytes() == other.getStoreFileSizeBytes()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasFamilyName()) { hash = (37 * hash) + FAMILY_NAME_FIELD_NUMBER; hash = (53 * hash) + getFamilyName().hashCode(); } if (hasStoreHomeDir()) { hash = (37 * hash) + STORE_HOME_DIR_FIELD_NUMBER; hash = (53 * hash) + getStoreHomeDir().hashCode(); } if (getStoreFileCount() > 0) { hash = (37 * hash) + STORE_FILE_FIELD_NUMBER; hash = (53 * hash) + getStoreFileList().hashCode(); } if (hasStoreFileSizeBytes()) { hash = (37 * hash) + STORE_FILE_SIZE_BYTES_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( getStoreFileSizeBytes()); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder 
newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.StoreDescriptor} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.StoreDescriptor) org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptorOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_StoreDescriptor_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_StoreDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.class, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); familyName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); storeHomeDir_ = ""; bitField0_ = (bitField0_ & ~0x00000002); storeFile_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000004); storeFileSizeBytes_ = 0L; bitField0_ = (bitField0_ & ~0x00000008); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_StoreDescriptor_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor build() { org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor result = new org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 
0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.familyName_ = familyName_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.storeHomeDir_ = storeHomeDir_; if (((bitField0_ & 0x00000004) == 0x00000004)) { storeFile_ = storeFile_.getUnmodifiableView(); bitField0_ = (bitField0_ & ~0x00000004); } result.storeFile_ = storeFile_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000004; } result.storeFileSizeBytes_ = storeFileSizeBytes_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.getDefaultInstance()) return this; if (other.hasFamilyName()) { setFamilyName(other.getFamilyName()); } if (other.hasStoreHomeDir()) { bitField0_ |= 0x00000002; storeHomeDir_ = other.storeHomeDir_; onChanged(); } if (!other.storeFile_.isEmpty()) { if (storeFile_.isEmpty()) { storeFile_ = other.storeFile_; bitField0_ = (bitField0_ & ~0x00000004); } else { ensureStoreFileIsMutable(); storeFile_.addAll(other.storeFile_); } onChanged(); } if (other.hasStoreFileSizeBytes()) { setStoreFileSizeBytes(other.getStoreFileSizeBytes()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasFamilyName()) { return false; } if (!hasStoreHomeDir()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } 
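      // Illustrative usage sketch (not generator output): one plausible way to
      // populate this builder. The family name and store file name below are
      // hypothetical placeholders. Both required fields (family_name,
      // store_home_dir) must be set before build(), which otherwise throws the
      // exception produced by newUninitializedMessageException, matching
      // isInitialized() above.
      //
      //   org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor store =
      //       org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.newBuilder()
      //           .setFamilyName(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8("cf"))
      //           .setStoreHomeDir("cf")            // relative to the region dir
      //           .addStoreFile("1a2b3c4d5e")       // hypothetical hfile name, relative to the store dir
      //           .setStoreFileSizeBytes(1024L)     // optional uint64
      //           .build();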
private int bitField0_; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString familyName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes family_name = 1;</code> */ public boolean hasFamilyName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes family_name = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getFamilyName() { return familyName_; } /** * <code>required bytes family_name = 1;</code> */ public Builder setFamilyName(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; familyName_ = value; onChanged(); return this; } /** * <code>required bytes family_name = 1;</code> */ public Builder clearFamilyName() { bitField0_ = (bitField0_ & ~0x00000001); familyName_ = getDefaultInstance().getFamilyName(); onChanged(); return this; } private java.lang.Object storeHomeDir_ = ""; /** * <pre> *relative to region dir * </pre> * * <code>required string store_home_dir = 2;</code> */ public boolean hasStoreHomeDir() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <pre> *relative to region dir * </pre> * * <code>required string store_home_dir = 2;</code> */ public java.lang.String getStoreHomeDir() { java.lang.Object ref = storeHomeDir_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { storeHomeDir_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> *relative to region dir * </pre> * * <code>required string store_home_dir = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getStoreHomeDirBytes() { java.lang.Object ref = storeHomeDir_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); storeHomeDir_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <pre> *relative to region dir * </pre> * * <code>required string store_home_dir = 2;</code> */ public Builder setStoreHomeDir( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; storeHomeDir_ = value; onChanged(); return this; } /** * <pre> *relative to region dir * </pre> * * <code>required string store_home_dir = 2;</code> */ public Builder clearStoreHomeDir() { bitField0_ = (bitField0_ & ~0x00000002); storeHomeDir_ = getDefaultInstance().getStoreHomeDir(); onChanged(); return this; } /** * <pre> *relative to region dir * </pre> * * <code>required string store_home_dir = 2;</code> */ public Builder setStoreHomeDirBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; storeHomeDir_ = value; onChanged(); return this; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringList storeFile_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY; private void ensureStoreFileIsMutable() { if (!((bitField0_ & 0x00000004) == 0x00000004)) { storeFile_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList(storeFile_); bitField0_ |= 0x00000004; } } /** * <pre> 
* relative to store dir * </pre> * * <code>repeated string store_file = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolStringList getStoreFileList() { return storeFile_.getUnmodifiableView(); } /** * <pre> * relative to store dir * </pre> * * <code>repeated string store_file = 3;</code> */ public int getStoreFileCount() { return storeFile_.size(); } /** * <pre> * relative to store dir * </pre> * * <code>repeated string store_file = 3;</code> */ public java.lang.String getStoreFile(int index) { return storeFile_.get(index); } /** * <pre> * relative to store dir * </pre> * * <code>repeated string store_file = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getStoreFileBytes(int index) { return storeFile_.getByteString(index); } /** * <pre> * relative to store dir * </pre> * * <code>repeated string store_file = 3;</code> */ public Builder setStoreFile( int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureStoreFileIsMutable(); storeFile_.set(index, value); onChanged(); return this; } /** * <pre> * relative to store dir * </pre> * * <code>repeated string store_file = 3;</code> */ public Builder addStoreFile( java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureStoreFileIsMutable(); storeFile_.add(value); onChanged(); return this; } /** * <pre> * relative to store dir * </pre> * * <code>repeated string store_file = 3;</code> */ public Builder addAllStoreFile( java.lang.Iterable<java.lang.String> values) { ensureStoreFileIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, storeFile_); onChanged(); return this; } /** * <pre> * relative to store dir * </pre> * * <code>repeated string store_file = 3;</code> */ public Builder clearStoreFile() { storeFile_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * <pre> * relative to store dir * </pre> * * <code>repeated string store_file = 3;</code> */ public Builder addStoreFileBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } ensureStoreFileIsMutable(); storeFile_.add(value); onChanged(); return this; } private long storeFileSizeBytes_ ; /** * <pre> * size of store file * </pre> * * <code>optional uint64 store_file_size_bytes = 4;</code> */ public boolean hasStoreFileSizeBytes() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <pre> * size of store file * </pre> * * <code>optional uint64 store_file_size_bytes = 4;</code> */ public long getStoreFileSizeBytes() { return storeFileSizeBytes_; } /** * <pre> * size of store file * </pre> * * <code>optional uint64 store_file_size_bytes = 4;</code> */ public Builder setStoreFileSizeBytes(long value) { bitField0_ |= 0x00000008; storeFileSizeBytes_ = value; onChanged(); return this; } /** * <pre> * size of store file * </pre> * * <code>optional uint64 store_file_size_bytes = 4;</code> */ public Builder clearStoreFileSizeBytes() { bitField0_ = (bitField0_ & ~0x00000008); storeFileSizeBytes_ = 0L; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet 
unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.StoreDescriptor) } // @@protoc_insertion_point(class_scope:hbase.pb.StoreDescriptor) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<StoreDescriptor> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<StoreDescriptor>() { public StoreDescriptor parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new StoreDescriptor(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<StoreDescriptor> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<StoreDescriptor> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface BulkLoadDescriptorOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.BulkLoadDescriptor) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ boolean hasTableName(); /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName(); /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); /** * <code>required bytes encoded_region_name = 2;</code> */ boolean hasEncodedRegionName(); /** * <code>required bytes encoded_region_name = 2;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getEncodedRegionName(); /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor> getStoresList(); /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor getStores(int index); /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ int getStoresCount(); /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> getStoresOrBuilderList(); /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptorOrBuilder getStoresOrBuilder( int index); /** * <code>required int64 bulkload_seq_num = 4;</code> */ boolean hasBulkloadSeqNum(); /** * <code>required int64 bulkload_seq_num = 4;</code> */ long getBulkloadSeqNum(); } /** * <pre> ** * Special WAL entry used for writing bulk load events to WAL * </pre> * * Protobuf type {@code hbase.pb.BulkLoadDescriptor} */ public static final class BulkLoadDescriptor extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.BulkLoadDescriptor) BulkLoadDescriptorOrBuilder { // Use BulkLoadDescriptor.newBuilder() to construct. private BulkLoadDescriptor(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private BulkLoadDescriptor() { encodedRegionName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; stores_ = java.util.Collections.emptyList(); bulkloadSeqNum_ = 0L; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private BulkLoadDescriptor( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = tableName_.toBuilder(); } tableName_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(tableName_); tableName_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 18: { bitField0_ |= 0x00000002; encodedRegionName_ = input.readBytes(); break; } case 26: { if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { stores_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor>(); mutable_bitField0_ |= 0x00000004; } stores_.add( input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.PARSER, extensionRegistry)); break; } case 32: { bitField0_ |= 0x00000004; bulkloadSeqNum_ = input.readInt64(); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { stores_ = java.util.Collections.unmodifiableList(stores_); } this.unknownFields = 
unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_BulkLoadDescriptor_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_BulkLoadDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor.class, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor.Builder.class); } private int bitField0_; public static final int TABLE_NAME_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_; /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } public static final int ENCODED_REGION_NAME_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString encodedRegionName_; /** * <code>required bytes encoded_region_name = 2;</code> */ public boolean hasEncodedRegionName() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required bytes encoded_region_name = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getEncodedRegionName() { return encodedRegionName_; } public static final int STORES_FIELD_NUMBER = 3; private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor> stores_; /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor> getStoresList() { return stores_; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> getStoresOrBuilderList() { return stores_; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public int getStoresCount() { return stores_.size(); } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor getStores(int index) { return stores_.get(index); } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptorOrBuilder getStoresOrBuilder( int index) { return stores_.get(index); } public static final int BULKLOAD_SEQ_NUM_FIELD_NUMBER = 4; private long bulkloadSeqNum_; /** * <code>required int64 bulkload_seq_num = 4;</code> */ public boolean hasBulkloadSeqNum() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>required int64 bulkload_seq_num = 4;</code> */ public long getBulkloadSeqNum() { return bulkloadSeqNum_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasTableName()) { memoizedIsInitialized = 0; return false; } if (!hasEncodedRegionName()) { memoizedIsInitialized = 0; return false; } if (!hasBulkloadSeqNum()) { memoizedIsInitialized = 0; return false; } if (!getTableName().isInitialized()) { memoizedIsInitialized = 0; return false; } for (int i = 0; i < getStoresCount(); i++) { if (!getStores(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, getTableName()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, encodedRegionName_); } for (int i = 0; i < stores_.size(); i++) { output.writeMessage(3, stores_.get(i)); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeInt64(4, bulkloadSeqNum_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(1, getTableName()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(2, encodedRegionName_); } for (int i = 0; i < stores_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(3, stores_.get(i)); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeInt64Size(4, bulkloadSeqNum_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor other = (org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor) obj; boolean result = true; 
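        // Field-by-field comparison: presence (has*) is compared before values so
        // an unset field never compares equal to a set one; unknownFields are
        // included so semantically different byte streams do not collide.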
result = result && (hasTableName() == other.hasTableName()); if (hasTableName()) { result = result && getTableName() .equals(other.getTableName()); } result = result && (hasEncodedRegionName() == other.hasEncodedRegionName()); if (hasEncodedRegionName()) { result = result && getEncodedRegionName() .equals(other.getEncodedRegionName()); } result = result && getStoresList() .equals(other.getStoresList()); result = result && (hasBulkloadSeqNum() == other.hasBulkloadSeqNum()); if (hasBulkloadSeqNum()) { result = result && (getBulkloadSeqNum() == other.getBulkloadSeqNum()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasTableName()) { hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER; hash = (53 * hash) + getTableName().hashCode(); } if (hasEncodedRegionName()) { hash = (37 * hash) + ENCODED_REGION_NAME_FIELD_NUMBER; hash = (53 * hash) + getEncodedRegionName().hashCode(); } if (getStoresCount() > 0) { hash = (37 * hash) + STORES_FIELD_NUMBER; hash = (53 * hash) + getStoresList().hashCode(); } if (hasBulkloadSeqNum()) { hash = (37 * hash) + BULKLOAD_SEQ_NUM_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( getBulkloadSeqNum()); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor 
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> ** * Special WAL entry used for writing bulk load events to WAL * </pre> * * Protobuf type {@code hbase.pb.BulkLoadDescriptor} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.BulkLoadDescriptor) org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptorOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_BulkLoadDescriptor_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_BulkLoadDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor.class, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { 
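// alwaysUseFieldBuilders is a test-only hook in the shaded GeneratedMessageV3: in normal
// operation the nested field builders (tableNameBuilder_, storesBuilder_) are created
// lazily on first access, while under test this flag forces them to be created eagerly
// here so the builder code paths are exercised.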
if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getTableNameFieldBuilder(); getStoresFieldBuilder(); } } public Builder clear() { super.clear(); if (tableNameBuilder_ == null) { tableName_ = null; } else { tableNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); encodedRegionName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); if (storesBuilder_ == null) { stores_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); } else { storesBuilder_.clear(); } bulkloadSeqNum_ = 0L; bitField0_ = (bitField0_ & ~0x00000008); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_BulkLoadDescriptor_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor build() { org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor result = new org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (tableNameBuilder_ == null) { result.tableName_ = tableName_; } else { result.tableName_ = tableNameBuilder_.build(); } if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.encodedRegionName_ = encodedRegionName_; if (storesBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004)) { stores_ = java.util.Collections.unmodifiableList(stores_); bitField0_ = (bitField0_ & ~0x00000004); } result.stores_ = stores_; } else { result.stores_ = storesBuilder_.build(); } if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000004; } result.bulkloadSeqNum_ = bulkloadSeqNum_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder 
mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor.getDefaultInstance()) return this; if (other.hasTableName()) { mergeTableName(other.getTableName()); } if (other.hasEncodedRegionName()) { setEncodedRegionName(other.getEncodedRegionName()); } if (storesBuilder_ == null) { if (!other.stores_.isEmpty()) { if (stores_.isEmpty()) { stores_ = other.stores_; bitField0_ = (bitField0_ & ~0x00000004); } else { ensureStoresIsMutable(); stores_.addAll(other.stores_); } onChanged(); } } else { if (!other.stores_.isEmpty()) { if (storesBuilder_.isEmpty()) { storesBuilder_.dispose(); storesBuilder_ = null; stores_ = other.stores_; bitField0_ = (bitField0_ & ~0x00000004); storesBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getStoresFieldBuilder() : null; } else { storesBuilder_.addAllMessages(other.stores_); } } } if (other.hasBulkloadSeqNum()) { setBulkloadSeqNum(other.getBulkloadSeqNum()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasTableName()) { return false; } if (!hasEncodedRegionName()) { return false; } if (!hasBulkloadSeqNum()) { return false; } if (!getTableName().isInitialized()) { return false; } for (int i = 0; i < getStoresCount(); i++) { if (!getStores(i).isInitialized()) { return false; } } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { if (tableNameBuilder_ == null) { return tableName_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } else { return tableNameBuilder_.getMessage(); } } /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ public Builder setTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (value == null) { throw new NullPointerException(); } tableName_ = value; onChanged(); } else { tableNameBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ public Builder setTableName( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) { if (tableNameBuilder_ == null) { tableName_ = builderForValue.build(); onChanged(); } else { tableNameBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && tableName_ != null && tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); } else { tableName_ = value; } onChanged(); } else { tableNameBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ public Builder clearTableName() { if (tableNameBuilder_ == null) { tableName_ = null; onChanged(); } else { tableNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() { bitField0_ |= 0x00000001; onChanged(); return getTableNameFieldBuilder().getBuilder(); } /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { if (tableNameBuilder_ != null) { return tableNameBuilder_.getMessageOrBuilder(); } else { return tableName_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } } /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { tableNameBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>( getTableName(), getParentForChildren(), isClean()); tableName_ = null; } return tableNameBuilder_; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString encodedRegionName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes encoded_region_name = 2;</code> */ public boolean hasEncodedRegionName() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required bytes encoded_region_name = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getEncodedRegionName() { return encodedRegionName_; } /** * <code>required bytes encoded_region_name = 2;</code> */ public Builder setEncodedRegionName(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; encodedRegionName_ = value; onChanged(); return this; } /** * <code>required bytes encoded_region_name = 2;</code> */ public Builder clearEncodedRegionName() { bitField0_ = (bitField0_ & ~0x00000002); encodedRegionName_ = getDefaultInstance().getEncodedRegionName(); onChanged(); return this; } private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor> stores_ = java.util.Collections.emptyList(); private void ensureStoresIsMutable() { if (!((bitField0_ & 0x00000004) == 0x00000004)) { stores_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor>(stores_); bitField0_ |= 0x00000004; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> storesBuilder_; /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor> getStoresList() { if (storesBuilder_ == null) { return java.util.Collections.unmodifiableList(stores_); } else { return storesBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public int getStoresCount() { if (storesBuilder_ == null) { return stores_.size(); } else { return storesBuilder_.getCount(); } } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor getStores(int index) { if (storesBuilder_ == null) { return stores_.get(index); } else { return 
storesBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public Builder setStores( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor value) { if (storesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureStoresIsMutable(); stores_.set(index, value); onChanged(); } else { storesBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public Builder setStores( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.Builder builderForValue) { if (storesBuilder_ == null) { ensureStoresIsMutable(); stores_.set(index, builderForValue.build()); onChanged(); } else { storesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public Builder addStores(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor value) { if (storesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureStoresIsMutable(); stores_.add(value); onChanged(); } else { storesBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public Builder addStores( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor value) { if (storesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureStoresIsMutable(); stores_.add(index, value); onChanged(); } else { storesBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public Builder addStores( org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.Builder builderForValue) { if (storesBuilder_ == null) { ensureStoresIsMutable(); stores_.add(builderForValue.build()); onChanged(); } else { storesBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public Builder addStores( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.Builder builderForValue) { if (storesBuilder_ == null) { ensureStoresIsMutable(); stores_.add(index, builderForValue.build()); onChanged(); } else { storesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public Builder addAllStores( java.lang.Iterable<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor> values) { if (storesBuilder_ == null) { ensureStoresIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, stores_); onChanged(); } else { storesBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public Builder clearStores() { if (storesBuilder_ == null) { stores_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); } else { storesBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public Builder removeStores(int index) { if (storesBuilder_ == null) { ensureStoresIsMutable(); stores_.remove(index); onChanged(); } else { storesBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.Builder getStoresBuilder( int index) { return getStoresFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptorOrBuilder getStoresOrBuilder( int index) { if (storesBuilder_ == null) { return stores_.get(index); } else { return storesBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> getStoresOrBuilderList() { if (storesBuilder_ != null) { return storesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(stores_); } } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.Builder addStoresBuilder() { return getStoresFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.getDefaultInstance()); } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.Builder addStoresBuilder( int index) { return getStoresFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.getDefaultInstance()); } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.Builder> getStoresBuilderList() { return getStoresFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> getStoresFieldBuilder() { if (storesBuilder_ == null) { storesBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptorOrBuilder>( stores_, ((bitField0_ & 0x00000004) == 0x00000004), getParentForChildren(), isClean()); stores_ = null; } return 
storesBuilder_; } private long bulkloadSeqNum_ ; /** * <code>required int64 bulkload_seq_num = 4;</code> */ public boolean hasBulkloadSeqNum() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>required int64 bulkload_seq_num = 4;</code> */ public long getBulkloadSeqNum() { return bulkloadSeqNum_; } /** * <code>required int64 bulkload_seq_num = 4;</code> */ public Builder setBulkloadSeqNum(long value) { bitField0_ |= 0x00000008; bulkloadSeqNum_ = value; onChanged(); return this; } /** * <code>required int64 bulkload_seq_num = 4;</code> */ public Builder clearBulkloadSeqNum() { bitField0_ = (bitField0_ & ~0x00000008); bulkloadSeqNum_ = 0L; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.BulkLoadDescriptor) } // @@protoc_insertion_point(class_scope:hbase.pb.BulkLoadDescriptor) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<BulkLoadDescriptor> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<BulkLoadDescriptor>() { public BulkLoadDescriptor parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new BulkLoadDescriptor(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<BulkLoadDescriptor> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<BulkLoadDescriptor> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface RegionEventDescriptorOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.RegionEventDescriptor) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>required .hbase.pb.RegionEventDescriptor.EventType event_type = 1;</code> */ boolean hasEventType(); /** * <code>required .hbase.pb.RegionEventDescriptor.EventType event_type = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.EventType getEventType(); /** * <code>required bytes table_name = 2;</code> */ boolean hasTableName(); /** * <code>required bytes table_name = 2;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getTableName(); /** * <code>required bytes encoded_region_name = 3;</code> */ boolean hasEncodedRegionName(); /** * <code>required bytes encoded_region_name = 3;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getEncodedRegionName(); /** * <code>optional uint64 
log_sequence_number = 4;</code> */ boolean hasLogSequenceNumber(); /** * <code>optional uint64 log_sequence_number = 4;</code> */ long getLogSequenceNumber(); /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor> getStoresList(); /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor getStores(int index); /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ int getStoresCount(); /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> getStoresOrBuilderList(); /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptorOrBuilder getStoresOrBuilder( int index); /** * <pre> * Server who opened the region * </pre> * * <code>optional .hbase.pb.ServerName server = 6;</code> */ boolean hasServer(); /** * <pre> * Server who opened the region * </pre> * * <code>optional .hbase.pb.ServerName server = 6;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getServer(); /** * <pre> * Server who opened the region * </pre> * * <code>optional .hbase.pb.ServerName server = 6;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder(); /** * <pre> * full region name * </pre> * * <code>optional bytes region_name = 7;</code> */ boolean hasRegionName(); /** * <pre> * full region name * </pre> * * <code>optional bytes region_name = 7;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getRegionName(); } /** * <pre> ** * Special WAL entry to hold all information related to a region event (open/close). * </pre> * * Protobuf type {@code hbase.pb.RegionEventDescriptor} */ public static final class RegionEventDescriptor extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.RegionEventDescriptor) RegionEventDescriptorOrBuilder { // Use RegionEventDescriptor.newBuilder() to construct.
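// A minimal construction/serialization sketch for this message (illustrative only, not
// emitted by protoc; the table and region values below are hypothetical):
//
//   WALProtos.RegionEventDescriptor desc = WALProtos.RegionEventDescriptor.newBuilder()
//       .setEventType(WALProtos.RegionEventDescriptor.EventType.REGION_OPEN)
//       .setTableName(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
//           .copyFromUtf8("demo_table"))
//       .setEncodedRegionName(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
//           .copyFromUtf8("0123abcd"))
//       .setLogSequenceNumber(42L)
//       .build(); // throws if a required field (event_type, table_name,
//                 // encoded_region_name) is unset; see isInitialized() below
//
//   // Round-trip through the wire format:
//   WALProtos.RegionEventDescriptor copy =
//       WALProtos.RegionEventDescriptor.parseFrom(desc.toByteString());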
private RegionEventDescriptor(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private RegionEventDescriptor() { eventType_ = 0; tableName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; encodedRegionName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; logSequenceNumber_ = 0L; stores_ = java.util.Collections.emptyList(); regionName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private RegionEventDescriptor( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { int rawValue = input.readEnum(); org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.EventType value = org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(1, rawValue); } else { bitField0_ |= 0x00000001; eventType_ = rawValue; } break; } case 18: { bitField0_ |= 0x00000002; tableName_ = input.readBytes(); break; } case 26: { bitField0_ |= 0x00000004; encodedRegionName_ = input.readBytes(); break; } case 32: { bitField0_ |= 0x00000008; logSequenceNumber_ = input.readUInt64(); break; } case 42: { if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) { stores_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor>(); mutable_bitField0_ |= 0x00000010; } stores_.add( input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.PARSER, extensionRegistry)); break; } case 50: { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = null; if (((bitField0_ & 0x00000010) == 0x00000010)) { subBuilder = server_.toBuilder(); } server_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(server_); server_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000010; break; } case 58: { bitField0_ |= 0x00000020; regionName_ = input.readBytes(); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) { stores_ = java.util.Collections.unmodifiableList(stores_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_RegionEventDescriptor_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_RegionEventDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.class, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.Builder.class); } /** * Protobuf enum {@code hbase.pb.RegionEventDescriptor.EventType} */ public enum EventType implements org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolMessageEnum { /** * <code>REGION_OPEN = 0;</code> */ REGION_OPEN(0), /** * <code>REGION_CLOSE = 1;</code> */ REGION_CLOSE(1), ; /** * <code>REGION_OPEN = 0;</code> */ public static final int REGION_OPEN_VALUE = 0; /** * <code>REGION_CLOSE = 1;</code> */ public static final int REGION_CLOSE_VALUE = 1; public final int getNumber() { return value; } /** * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static EventType valueOf(int value) { return forNumber(value); } public static EventType forNumber(int value) { switch (value) { case 0: return REGION_OPEN; case 1: return REGION_CLOSE; default: return null; } } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<EventType> internalGetValueMap() { return internalValueMap; } private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap< EventType> internalValueMap = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<EventType>() { public EventType findValueByNumber(int number) { return EventType.forNumber(number); } }; public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); } public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.getDescriptor().getEnumTypes().get(0); } private static final EventType[] VALUES = values(); public static EventType valueOf( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } return VALUES[desc.getIndex()]; } private final int value; private EventType(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:hbase.pb.RegionEventDescriptor.EventType) } private int bitField0_; public static final int EVENT_TYPE_FIELD_NUMBER = 1; private int eventType_; /** * <code>required .hbase.pb.RegionEventDescriptor.EventType event_type = 1;</code> */ public boolean hasEventType() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.RegionEventDescriptor.EventType event_type = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.EventType getEventType() { org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.EventType result = 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.valueOf(eventType_); return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.REGION_OPEN : result; } public static final int TABLE_NAME_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString tableName_; /** * <code>required bytes table_name = 2;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required bytes table_name = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getTableName() { return tableName_; } public static final int ENCODED_REGION_NAME_FIELD_NUMBER = 3; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString encodedRegionName_; /** * <code>required bytes encoded_region_name = 3;</code> */ public boolean hasEncodedRegionName() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>required bytes encoded_region_name = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getEncodedRegionName() { return encodedRegionName_; } public static final int LOG_SEQUENCE_NUMBER_FIELD_NUMBER = 4; private long logSequenceNumber_; /** * <code>optional uint64 log_sequence_number = 4;</code> */ public boolean hasLogSequenceNumber() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional uint64 log_sequence_number = 4;</code> */ public long getLogSequenceNumber() { return logSequenceNumber_; } public static final int STORES_FIELD_NUMBER = 5; private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor> stores_; /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor> getStoresList() { return stores_; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> getStoresOrBuilderList() { return stores_; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public int getStoresCount() { return stores_.size(); } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor getStores(int index) { return stores_.get(index); } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptorOrBuilder getStoresOrBuilder( int index) { return stores_.get(index); } public static final int SERVER_FIELD_NUMBER = 6; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName server_; /** * <pre> * Server who opened the region * </pre> * * <code>optional .hbase.pb.ServerName server = 6;</code> */ public boolean hasServer() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <pre> * Server who opened the region * </pre> * * <code>optional .hbase.pb.ServerName server = 6;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getServer() { return server_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : server_; } /** * <pre> * Server who opened the region * </pre> * * <code>optional .hbase.pb.ServerName server = 6;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { return server_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : server_; } public static final int REGION_NAME_FIELD_NUMBER = 7; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString regionName_; /** * <pre> * full region name * </pre> * * <code>optional bytes region_name = 7;</code> */ public boolean hasRegionName() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <pre> * full region name * </pre> * * <code>optional bytes region_name = 7;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getRegionName() { return regionName_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasEventType()) { memoizedIsInitialized = 0; return false; } if (!hasTableName()) { memoizedIsInitialized = 0; return false; } if (!hasEncodedRegionName()) { memoizedIsInitialized = 0; return false; } for (int i = 0; i < getStoresCount(); i++) { if (!getStores(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } if (hasServer()) { if (!getServer().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeEnum(1, eventType_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, tableName_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBytes(3, encodedRegionName_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeUInt64(4, logSequenceNumber_); } for (int i = 0; i < stores_.size(); i++) { output.writeMessage(5, stores_.get(i)); } if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeMessage(6, getServer()); } if (((bitField0_ & 0x00000020) == 0x00000020)) { output.writeBytes(7, regionName_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeEnumSize(1, eventType_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(2, tableName_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(3, encodedRegionName_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt64Size(4, logSequenceNumber_); } for (int i = 0; i < stores_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(5, stores_.get(i)); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(6, getServer()); } if (((bitField0_ & 0x00000020) == 
0x00000020)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(7, regionName_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor other = (org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor) obj; boolean result = true; result = result && (hasEventType() == other.hasEventType()); if (hasEventType()) { result = result && eventType_ == other.eventType_; } result = result && (hasTableName() == other.hasTableName()); if (hasTableName()) { result = result && getTableName() .equals(other.getTableName()); } result = result && (hasEncodedRegionName() == other.hasEncodedRegionName()); if (hasEncodedRegionName()) { result = result && getEncodedRegionName() .equals(other.getEncodedRegionName()); } result = result && (hasLogSequenceNumber() == other.hasLogSequenceNumber()); if (hasLogSequenceNumber()) { result = result && (getLogSequenceNumber() == other.getLogSequenceNumber()); } result = result && getStoresList() .equals(other.getStoresList()); result = result && (hasServer() == other.hasServer()); if (hasServer()) { result = result && getServer() .equals(other.getServer()); } result = result && (hasRegionName() == other.hasRegionName()); if (hasRegionName()) { result = result && getRegionName() .equals(other.getRegionName()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasEventType()) { hash = (37 * hash) + EVENT_TYPE_FIELD_NUMBER; hash = (53 * hash) + eventType_; } if (hasTableName()) { hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER; hash = (53 * hash) + getTableName().hashCode(); } if (hasEncodedRegionName()) { hash = (37 * hash) + ENCODED_REGION_NAME_FIELD_NUMBER; hash = (53 * hash) + getEncodedRegionName().hashCode(); } if (hasLogSequenceNumber()) { hash = (37 * hash) + LOG_SEQUENCE_NUMBER_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( getLogSequenceNumber()); } if (getStoresCount() > 0) { hash = (37 * hash) + STORES_FIELD_NUMBER; hash = (53 * hash) + getStoresList().hashCode(); } if (hasServer()) { hash = (37 * hash) + SERVER_FIELD_NUMBER; hash = (53 * hash) + getServer().hashCode(); } if (hasRegionName()) { hash = (37 * hash) + REGION_NAME_FIELD_NUMBER; hash = (53 * hash) + getRegionName().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> ** * Special WAL entry to hold all information related to a region event (open/close). * </pre> * * Protobuf type {@code hbase.pb.RegionEventDescriptor} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.RegionEventDescriptor) org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptorOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_RegionEventDescriptor_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_RegionEventDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.class, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getStoresFieldBuilder(); getServerFieldBuilder(); } } public Builder clear() { super.clear(); eventType_ = 0; bitField0_ = (bitField0_ & ~0x00000001); tableName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); encodedRegionName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000004); logSequenceNumber_ = 0L; bitField0_ = (bitField0_ & ~0x00000008); if (storesBuilder_ == null) { stores_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000010); } else { storesBuilder_.clear(); } if (serverBuilder_ == null) { server_ = null; } else { serverBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000020); regionName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000040); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_RegionEventDescriptor_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor build() { org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor result = buildPartial(); if (!result.isInitialized()) { throw
newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor result = new org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.eventType_ = eventType_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.tableName_ = tableName_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.encodedRegionName_ = encodedRegionName_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; } result.logSequenceNumber_ = logSequenceNumber_; if (storesBuilder_ == null) { if (((bitField0_ & 0x00000010) == 0x00000010)) { stores_ = java.util.Collections.unmodifiableList(stores_); bitField0_ = (bitField0_ & ~0x00000010); } result.stores_ = stores_; } else { result.stores_ = storesBuilder_.build(); } if (((from_bitField0_ & 0x00000020) == 0x00000020)) { to_bitField0_ |= 0x00000010; } if (serverBuilder_ == null) { result.server_ = server_; } else { result.server_ = serverBuilder_.build(); } if (((from_bitField0_ & 0x00000040) == 0x00000040)) { to_bitField0_ |= 0x00000020; } result.regionName_ = regionName_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.getDefaultInstance()) return this; if (other.hasEventType()) { setEventType(other.getEventType()); } if (other.hasTableName()) { setTableName(other.getTableName()); } if (other.hasEncodedRegionName()) { setEncodedRegionName(other.getEncodedRegionName()); } if (other.hasLogSequenceNumber()) { setLogSequenceNumber(other.getLogSequenceNumber()); } if (storesBuilder_ == null) { if (!other.stores_.isEmpty()) { if (stores_.isEmpty()) { stores_ = other.stores_; bitField0_ = (bitField0_ & 
~0x00000010); } else { ensureStoresIsMutable(); stores_.addAll(other.stores_); } onChanged(); } } else { if (!other.stores_.isEmpty()) { if (storesBuilder_.isEmpty()) { storesBuilder_.dispose(); storesBuilder_ = null; stores_ = other.stores_; bitField0_ = (bitField0_ & ~0x00000010); storesBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getStoresFieldBuilder() : null; } else { storesBuilder_.addAllMessages(other.stores_); } } } if (other.hasServer()) { mergeServer(other.getServer()); } if (other.hasRegionName()) { setRegionName(other.getRegionName()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasEventType()) { return false; } if (!hasTableName()) { return false; } if (!hasEncodedRegionName()) { return false; } for (int i = 0; i < getStoresCount(); i++) { if (!getStores(i).isInitialized()) { return false; } } if (hasServer()) { if (!getServer().isInitialized()) { return false; } } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private int eventType_ = 0; /** * <code>required .hbase.pb.RegionEventDescriptor.EventType event_type = 1;</code> */ public boolean hasEventType() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.RegionEventDescriptor.EventType event_type = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.EventType getEventType() { org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.EventType result = org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.valueOf(eventType_); return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.REGION_OPEN : result; } /** * <code>required .hbase.pb.RegionEventDescriptor.EventType event_type = 1;</code> */ public Builder setEventType(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.EventType value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; eventType_ = value.getNumber(); onChanged(); return this; } /** * <code>required .hbase.pb.RegionEventDescriptor.EventType event_type = 1;</code> */ public Builder clearEventType() { bitField0_ = (bitField0_ & ~0x00000001); eventType_ = 0; onChanged(); return this; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString tableName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes table_name = 2;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required bytes table_name = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getTableName() { return tableName_; } /** * <code>required bytes table_name = 2;</code> */ public Builder setTableName(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; tableName_ = value; onChanged(); return this; } /** * <code>required bytes table_name = 2;</code> */ public Builder clearTableName() { bitField0_ = (bitField0_ & ~0x00000002); tableName_ = getDefaultInstance().getTableName(); onChanged(); return this; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString encodedRegionName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes encoded_region_name = 3;</code> */ public boolean hasEncodedRegionName() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>required bytes encoded_region_name = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getEncodedRegionName() { return encodedRegionName_; } /** * <code>required bytes encoded_region_name = 3;</code> */ public Builder setEncodedRegionName(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; encodedRegionName_ = value; onChanged(); return this; } /** * <code>required bytes encoded_region_name = 3;</code> */ public Builder clearEncodedRegionName() { bitField0_ = (bitField0_ & ~0x00000004); encodedRegionName_ = getDefaultInstance().getEncodedRegionName(); onChanged(); return this; } private long logSequenceNumber_ ; /** * <code>optional uint64 log_sequence_number = 4;</code> */ public boolean hasLogSequenceNumber() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional uint64 log_sequence_number = 4;</code> */ public long getLogSequenceNumber() { return logSequenceNumber_; } /** * <code>optional uint64 log_sequence_number = 4;</code> */ public Builder setLogSequenceNumber(long value) { bitField0_ |= 0x00000008; logSequenceNumber_ = value; onChanged(); return this; } /** * <code>optional uint64 log_sequence_number = 4;</code> */ public Builder clearLogSequenceNumber() { bitField0_ = (bitField0_ & ~0x00000008); logSequenceNumber_ = 0L; onChanged(); return this; } private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor> stores_ = java.util.Collections.emptyList(); private void 
ensureStoresIsMutable() { if (!((bitField0_ & 0x00000010) == 0x00000010)) { stores_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor>(stores_); bitField0_ |= 0x00000010; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> storesBuilder_; /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor> getStoresList() { if (storesBuilder_ == null) { return java.util.Collections.unmodifiableList(stores_); } else { return storesBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public int getStoresCount() { if (storesBuilder_ == null) { return stores_.size(); } else { return storesBuilder_.getCount(); } } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor getStores(int index) { if (storesBuilder_ == null) { return stores_.get(index); } else { return storesBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public Builder setStores( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor value) { if (storesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureStoresIsMutable(); stores_.set(index, value); onChanged(); } else { storesBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public Builder setStores( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.Builder builderForValue) { if (storesBuilder_ == null) { ensureStoresIsMutable(); stores_.set(index, builderForValue.build()); onChanged(); } else { storesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public Builder addStores(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor value) { if (storesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureStoresIsMutable(); stores_.add(value); onChanged(); } else { storesBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public Builder addStores( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor value) { if (storesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureStoresIsMutable(); stores_.add(index, value); onChanged(); } else { storesBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public Builder addStores( org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.Builder builderForValue) { if (storesBuilder_ == null) { ensureStoresIsMutable(); stores_.add(builderForValue.build()); onChanged(); } else { storesBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public Builder addStores( int index, 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.Builder builderForValue) { if (storesBuilder_ == null) { ensureStoresIsMutable(); stores_.add(index, builderForValue.build()); onChanged(); } else { storesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public Builder addAllStores( java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor> values) { if (storesBuilder_ == null) { ensureStoresIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, stores_); onChanged(); } else { storesBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public Builder clearStores() { if (storesBuilder_ == null) { stores_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000010); onChanged(); } else { storesBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public Builder removeStores(int index) { if (storesBuilder_ == null) { ensureStoresIsMutable(); stores_.remove(index); onChanged(); } else { storesBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.Builder getStoresBuilder( int index) { return getStoresFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptorOrBuilder getStoresOrBuilder( int index) { if (storesBuilder_ == null) { return stores_.get(index); } else { return storesBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> getStoresOrBuilderList() { if (storesBuilder_ != null) { return storesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(stores_); } } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.Builder addStoresBuilder() { return getStoresFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.getDefaultInstance()); } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.Builder addStoresBuilder( int index) { return getStoresFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.getDefaultInstance()); } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.Builder> getStoresBuilderList() { return getStoresFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> getStoresFieldBuilder() { if (storesBuilder_ == null) { storesBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptorOrBuilder>( stores_, ((bitField0_ & 0x00000010) == 0x00000010), getParentForChildren(), isClean()); stores_ = null; } return storesBuilder_; } private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName server_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverBuilder_; /** * <pre> * Server who opened the region * </pre> * * <code>optional .hbase.pb.ServerName server = 6;</code> */ public boolean hasServer() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <pre> * Server who opened the region * </pre> * * <code>optional .hbase.pb.ServerName server = 6;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getServer() { if (serverBuilder_ == null) { return server_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : server_; } else { return serverBuilder_.getMessage(); } } /** * <pre> * Server who opened the region * </pre> * * <code>optional .hbase.pb.ServerName server = 6;</code> */ public Builder setServer(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) { if (serverBuilder_ == null) { if (value == null) { throw new NullPointerException(); } server_ = value; onChanged(); } else { serverBuilder_.setMessage(value); } bitField0_ |= 0x00000020; return this; } /** * <pre> * Server who opened the region * </pre> * * <code>optional .hbase.pb.ServerName server = 6;</code> */ public Builder setServer( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { if (serverBuilder_ == null) { server_ = builderForValue.build(); onChanged(); } else { serverBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000020; return this; } /** * <pre> * Server who opened the region * </pre> * * <code>optional .hbase.pb.ServerName server = 6;</code> */ public Builder mergeServer(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) { if (serverBuilder_ == null) { if (((bitField0_ & 0x00000020) == 0x00000020) && server_ != null && server_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { server_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.newBuilder(server_).mergeFrom(value).buildPartial(); } else { server_ = value; } onChanged(); } else { serverBuilder_.mergeFrom(value); } bitField0_ |= 0x00000020; return this; } /** * <pre> * Server who opened the region * </pre> * * <code>optional .hbase.pb.ServerName server = 6;</code> */ public Builder clearServer() { if (serverBuilder_ == null) { server_ = null; onChanged(); } else { serverBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000020); return this; } /** * <pre> * Server who opened the region * </pre> * * <code>optional .hbase.pb.ServerName server = 6;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder getServerBuilder() { bitField0_ |= 0x00000020; onChanged(); return getServerFieldBuilder().getBuilder(); } /** * <pre> * Server who opened the region * </pre> * * <code>optional .hbase.pb.ServerName server = 6;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { if (serverBuilder_ != null) { return serverBuilder_.getMessageOrBuilder(); } else { return server_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : server_; } } /** * <pre> * Server who opened the region * </pre> * * <code>optional .hbase.pb.ServerName server = 6;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getServerFieldBuilder() { if (serverBuilder_ == null) { serverBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( getServer(), getParentForChildren(), isClean()); server_ = null; } return serverBuilder_; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString regionName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <pre> * full region name * </pre> * * <code>optional bytes region_name = 7;</code> */ public boolean hasRegionName() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** * <pre> * full region name * </pre> * * <code>optional bytes region_name = 7;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getRegionName() { return regionName_; } /** * <pre> * full region name * </pre> * * <code>optional bytes region_name = 7;</code> */ public Builder setRegionName(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000040; regionName_ = value; onChanged(); return this; } /** * <pre> * full region name * </pre> * * <code>optional bytes region_name = 7;</code> */ public Builder clearRegionName() { bitField0_ = (bitField0_ & ~0x00000040); regionName_ = getDefaultInstance().getRegionName(); onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.RegionEventDescriptor) } // @@protoc_insertion_point(class_scope:hbase.pb.RegionEventDescriptor) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<RegionEventDescriptor> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<RegionEventDescriptor>() { public RegionEventDescriptor parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { 
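// Parsing delegates to the message's private parsing constructor, which
// consumes tag/value pairs from the CodedInputStream until end of message
// and preserves any fields it does not recognize as unknown fields.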
return new RegionEventDescriptor(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<RegionEventDescriptor> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<RegionEventDescriptor> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface WALTrailerOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.WALTrailer) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { } /** * <pre> ** * A trailer that is appended to the end of a properly closed WAL file. * If missing, this is either a legacy or a corrupted WAL file. * N.B. This trailer currently doesn't contain any information and we * purposefully don't expose it in the WAL APIs. It's for future growth. * </pre> * * Protobuf type {@code hbase.pb.WALTrailer} */ public static final class WALTrailer extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.WALTrailer) WALTrailerOrBuilder { // Use WALTrailer.newBuilder() to construct. private WALTrailer(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private WALTrailer() { } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private WALTrailer( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALTrailer_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALTrailer_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer.class, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer.Builder.class); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; 
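// WALTrailer declares no fields, so initialization can never fail; the
// result is memoized as 1 (true) and returned below.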
return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer other = (org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer) obj; boolean result = true; result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> ** * A trailer that is appended to the end of a properly closed WAL file. * If missing, this is either a legacy or a corrupted WAL file. * N.B. This trailer currently doesn't contain any information and we * purposefully don't expose it in the WAL APIs. It's for future growth. 
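* Illustrative note (not generated output): since the message is empty, an
* instance is normally obtained as WALTrailer.getDefaultInstance() or via
* newBuilder().build(); either serializes to zero bytes.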
* </pre> * * Protobuf type {@code hbase.pb.WALTrailer} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.WALTrailer) org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailerOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALTrailer_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALTrailer_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer.class, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALTrailer_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer build() { org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer result = new org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer(this); onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder 
mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer.getDefaultInstance()) return this; this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.WALTrailer) } // @@protoc_insertion_point(class_scope:hbase.pb.WALTrailer) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<WALTrailer> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<WALTrailer>() { public WALTrailer parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new WALTrailer(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<WALTrailer> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<WALTrailer> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_WALHeader_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_WALHeader_fieldAccessorTable; private static final 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_WALKey_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_WALKey_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_FamilyScope_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_FamilyScope_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_CompactionDescriptor_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_CompactionDescriptor_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_FlushDescriptor_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_FlushDescriptor_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_StoreDescriptor_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_StoreDescriptor_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_BulkLoadDescriptor_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_BulkLoadDescriptor_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RegionEventDescriptor_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_RegionEventDescriptor_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_WALTrailer_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_WALTrailer_fieldAccessorTable; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n\tWAL.proto\022\010hbase.pb\032\013HBase.proto\"\217\001\n\tW" + "ALHeader\022\027\n\017has_compression\030\001 \001(\010\022\026\n\016enc" + "ryption_key\030\002 \001(\014\022\033\n\023has_tag_compression" + "\030\003 \001(\010\022\027\n\017writer_cls_name\030\004 \001(\t\022\033\n\023cell_" + "codec_cls_name\030\005 \001(\t\"\273\002\n\006WALKey\022\033\n\023encod" + 
"ed_region_name\030\001 \002(\014\022\022\n\ntable_name\030\002 \002(\014" + "\022\033\n\023log_sequence_number\030\003 \002(\004\022\022\n\nwrite_t" + "ime\030\004 \002(\004\022&\n\ncluster_id\030\005 \001(\0132\016.hbase.pb" + ".UUIDB\002\030\001\022%\n\006scopes\030\006 \003(\0132\025.hbase.pb.Fam" + "ilyScope\022\032\n\022following_kv_count\030\007 \001(\r\022#\n\013", "cluster_ids\030\010 \003(\0132\016.hbase.pb.UUID\022\022\n\nnon" + "ceGroup\030\t \001(\004\022\r\n\005nonce\030\n \001(\004\022\034\n\024orig_seq" + "uence_number\030\013 \001(\004\"F\n\013FamilyScope\022\016\n\006fam" + "ily\030\001 \002(\014\022\'\n\nscope_type\030\002 \002(\0162\023.hbase.pb" + ".ScopeType\"\276\001\n\024CompactionDescriptor\022\022\n\nt" + "able_name\030\001 \002(\014\022\033\n\023encoded_region_name\030\002" + " \002(\014\022\023\n\013family_name\030\003 \002(\014\022\030\n\020compaction_" + "input\030\004 \003(\t\022\031\n\021compaction_output\030\005 \003(\t\022\026" + "\n\016store_home_dir\030\006 \002(\t\022\023\n\013region_name\030\007 " + "\001(\014\"\244\003\n\017FlushDescriptor\0225\n\006action\030\001 \002(\0162", "%.hbase.pb.FlushDescriptor.FlushAction\022\022" + "\n\ntable_name\030\002 \002(\014\022\033\n\023encoded_region_nam" + "e\030\003 \002(\014\022\035\n\025flush_sequence_number\030\004 \001(\004\022E" + "\n\rstore_flushes\030\005 \003(\0132..hbase.pb.FlushDe" + "scriptor.StoreFlushDescriptor\022\023\n\013region_" + "name\030\006 \001(\014\032Y\n\024StoreFlushDescriptor\022\023\n\013fa" + "mily_name\030\001 \002(\014\022\026\n\016store_home_dir\030\002 \002(\t\022" + "\024\n\014flush_output\030\003 \003(\t\"S\n\013FlushAction\022\017\n\013" + "START_FLUSH\020\000\022\020\n\014COMMIT_FLUSH\020\001\022\017\n\013ABORT" + "_FLUSH\020\002\022\020\n\014CANNOT_FLUSH\020\003\"q\n\017StoreDescr", "iptor\022\023\n\013family_name\030\001 \002(\014\022\026\n\016store_home" + "_dir\030\002 \002(\t\022\022\n\nstore_file\030\003 \003(\t\022\035\n\025store_" + "file_size_bytes\030\004 \001(\004\"\237\001\n\022BulkLoadDescri" + "ptor\022\'\n\ntable_name\030\001 \002(\0132\023.hbase.pb.Tabl" + "eName\022\033\n\023encoded_region_name\030\002 \002(\014\022)\n\006st" + "ores\030\003 \003(\0132\031.hbase.pb.StoreDescriptor\022\030\n" + "\020bulkload_seq_num\030\004 \002(\003\"\272\002\n\025RegionEventD" + "escriptor\022=\n\nevent_type\030\001 \002(\0162).hbase.pb" + ".RegionEventDescriptor.EventType\022\022\n\ntabl" + "e_name\030\002 \002(\014\022\033\n\023encoded_region_name\030\003 \002(", "\014\022\033\n\023log_sequence_number\030\004 \001(\004\022)\n\006stores" + "\030\005 \003(\0132\031.hbase.pb.StoreDescriptor\022$\n\006ser" + "ver\030\006 \001(\0132\024.hbase.pb.ServerName\022\023\n\013regio" + "n_name\030\007 \001(\014\".\n\tEventType\022\017\n\013REGION_OPEN" + "\020\000\022\020\n\014REGION_CLOSE\020\001\"\014\n\nWALTrailer*d\n\tSc" + "opeType\022\033\n\027REPLICATION_SCOPE_LOCAL\020\000\022\034\n\030" + "REPLICATION_SCOPE_GLOBAL\020\001\022\034\n\030REPLICATIO" + "N_SCOPE_SERIAL\020\002BF\n1org.apache.hadoop.hb" + "ase.shaded.protobuf.generatedB\tWALProtos" + "H\001\210\001\000\240\001\001" }; org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { public org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry assignDescriptors( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor root) { descriptor = root; return null; } }; org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor[] { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor(), }, assigner); internal_static_hbase_pb_WALHeader_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_hbase_pb_WALHeader_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_WALHeader_descriptor, new java.lang.String[] { "HasCompression", "EncryptionKey", "HasTagCompression", "WriterClsName", "CellCodecClsName", }); internal_static_hbase_pb_WALKey_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_hbase_pb_WALKey_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_WALKey_descriptor, new java.lang.String[] { "EncodedRegionName", "TableName", "LogSequenceNumber", "WriteTime", "ClusterId", "Scopes", "FollowingKvCount", "ClusterIds", "NonceGroup", "Nonce", "OrigSequenceNumber", }); internal_static_hbase_pb_FamilyScope_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_hbase_pb_FamilyScope_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_FamilyScope_descriptor, new java.lang.String[] { "Family", "ScopeType", }); internal_static_hbase_pb_CompactionDescriptor_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_hbase_pb_CompactionDescriptor_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_CompactionDescriptor_descriptor, new java.lang.String[] { "TableName", "EncodedRegionName", "FamilyName", "CompactionInput", "CompactionOutput", "StoreHomeDir", "RegionName", }); internal_static_hbase_pb_FlushDescriptor_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_hbase_pb_FlushDescriptor_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_FlushDescriptor_descriptor, new java.lang.String[] { "Action", "TableName", "EncodedRegionName", "FlushSequenceNumber", "StoreFlushes", "RegionName", }); internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_descriptor = internal_static_hbase_pb_FlushDescriptor_descriptor.getNestedTypes().get(0); internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_descriptor, new java.lang.String[] { "FamilyName", "StoreHomeDir", "FlushOutput", }); internal_static_hbase_pb_StoreDescriptor_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_hbase_pb_StoreDescriptor_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_StoreDescriptor_descriptor, new java.lang.String[] { "FamilyName", "StoreHomeDir", "StoreFile", "StoreFileSizeBytes", 
}); internal_static_hbase_pb_BulkLoadDescriptor_descriptor = getDescriptor().getMessageTypes().get(6); internal_static_hbase_pb_BulkLoadDescriptor_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_BulkLoadDescriptor_descriptor, new java.lang.String[] { "TableName", "EncodedRegionName", "Stores", "BulkloadSeqNum", }); internal_static_hbase_pb_RegionEventDescriptor_descriptor = getDescriptor().getMessageTypes().get(7); internal_static_hbase_pb_RegionEventDescriptor_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_RegionEventDescriptor_descriptor, new java.lang.String[] { "EventType", "TableName", "EncodedRegionName", "LogSequenceNumber", "Stores", "Server", "RegionName", }); internal_static_hbase_pb_WALTrailer_descriptor = getDescriptor().getMessageTypes().get(8); internal_static_hbase_pb_WALTrailer_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_WALTrailer_descriptor, new java.lang.String[] { }); org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) }
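// ---------------------------------------------------------------------------
// Illustrative usage sketch -- NOT part of the protoc output above. The class
// and variable names below (WalProtosUsageExample, encodedRegionName) are
// hypothetical; the message, builder, and parse APIs are the generated ones
// declared in this file. Reconstructed from the field Javadocs, the
// RegionEventDescriptor message corresponds roughly to:
//
//   message RegionEventDescriptor {
//     enum EventType { REGION_OPEN = 0; REGION_CLOSE = 1; }
//     required EventType event_type = 1;
//     required bytes table_name = 2;
//     required bytes encoded_region_name = 3;
//     optional uint64 log_sequence_number = 4;
//     repeated StoreDescriptor stores = 5;
//     optional ServerName server = 6;  // server who opened the region
//     optional bytes region_name = 7;  // full region name
//   }
//
// See WAL.proto for the authoritative definition.
// ---------------------------------------------------------------------------
final class WalProtosUsageExample {
  public static void main(String[] args) throws Exception {
    // Hypothetical encoded region name, for illustration only.
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString encodedRegionName =
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8("1588230740");

    // All three required fields (event_type, table_name, encoded_region_name)
    // must be set before build(), or it throws UninitializedMessageException.
    WALProtos.RegionEventDescriptor event =
        WALProtos.RegionEventDescriptor.newBuilder()
            .setEventType(WALProtos.RegionEventDescriptor.EventType.REGION_OPEN)
            .setTableName(
                org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8("t1"))
            .setEncodedRegionName(encodedRegionName)
            .setLogSequenceNumber(42L)
            .build();

    // Round-trip through the wire format.
    byte[] wire = event.toByteArray();
    WALProtos.RegionEventDescriptor parsed =
        WALProtos.RegionEventDescriptor.parseFrom(wire);
    System.out.println(parsed.getEventType() + " seq=" + parsed.getLogSequenceNumber());

    // WALTrailer carries no fields; an empty instance marks a cleanly closed
    // WAL file and serializes to zero bytes.
    WALProtos.WALTrailer trailer = WALProtos.WALTrailer.getDefaultInstance();
    System.out.println("trailer bytes=" + trailer.toByteArray().length);
  }
}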