// Generated by the protocol buffer compiler. DO NOT EDIT! // source: HBase.proto package org.apache.hadoop.hbase.shaded.protobuf.generated; public final class HBaseProtos { private HBaseProtos() {} public static void registerAllExtensions( org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite registry) { } public static void registerAllExtensions( org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry registry) { registerAllExtensions( (org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite) registry); } /** * <pre> * Comparison operators * </pre> * * Protobuf enum {@code hbase.pb.CompareType} */ public enum CompareType implements org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolMessageEnum { /** * <code>LESS = 0;</code> */ LESS(0), /** * <code>LESS_OR_EQUAL = 1;</code> */ LESS_OR_EQUAL(1), /** * <code>EQUAL = 2;</code> */ EQUAL(2), /** * <code>NOT_EQUAL = 3;</code> */ NOT_EQUAL(3), /** * <code>GREATER_OR_EQUAL = 4;</code> */ GREATER_OR_EQUAL(4), /** * <code>GREATER = 5;</code> */ GREATER(5), /** * <code>NO_OP = 6;</code> */ NO_OP(6), ; /** * <code>LESS = 0;</code> */ public static final int LESS_VALUE = 0; /** * <code>LESS_OR_EQUAL = 1;</code> */ public static final int LESS_OR_EQUAL_VALUE = 1; /** * <code>EQUAL = 2;</code> */ public static final int EQUAL_VALUE = 2; /** * <code>NOT_EQUAL = 3;</code> */ public static final int NOT_EQUAL_VALUE = 3; /** * <code>GREATER_OR_EQUAL = 4;</code> */ public static final int GREATER_OR_EQUAL_VALUE = 4; /** * <code>GREATER = 5;</code> */ public static final int GREATER_VALUE = 5; /** * <code>NO_OP = 6;</code> */ public static final int NO_OP_VALUE = 6; public final int getNumber() { return value; } /** * @deprecated Use {@link #forNumber(int)} instead. 
*/ @java.lang.Deprecated public static CompareType valueOf(int value) { return forNumber(value); } public static CompareType forNumber(int value) { switch (value) { case 0: return LESS; case 1: return LESS_OR_EQUAL; case 2: return EQUAL; case 3: return NOT_EQUAL; case 4: return GREATER_OR_EQUAL; case 5: return GREATER; case 6: return NO_OP; default: return null; } } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<CompareType> internalGetValueMap() { return internalValueMap; } private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap< CompareType> internalValueMap = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<CompareType>() { public CompareType findValueByNumber(int number) { return CompareType.forNumber(number); } }; public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); } public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor().getEnumTypes().get(0); } private static final CompareType[] VALUES = values(); public static CompareType valueOf( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } return VALUES[desc.getIndex()]; } private final int value; private CompareType(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:hbase.pb.CompareType) } /** * Protobuf enum {@code hbase.pb.TimeUnit} */ public enum TimeUnit implements org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolMessageEnum { /** * <code>NANOSECONDS = 1;</code> */ NANOSECONDS(1), /** * <code>MICROSECONDS = 2;</code> */ MICROSECONDS(2), /** * <code>MILLISECONDS = 3;</code> */ MILLISECONDS(3), /** * <code>SECONDS = 4;</code> */ SECONDS(4), /** * <code>MINUTES = 5;</code> */ MINUTES(5), /** * <code>HOURS = 6;</code> */ HOURS(6), /** * <code>DAYS = 7;</code> */ DAYS(7), ; /** * <code>NANOSECONDS = 1;</code> */ public static final int NANOSECONDS_VALUE = 1; /** * <code>MICROSECONDS = 2;</code> */ public static final int MICROSECONDS_VALUE = 2; /** * <code>MILLISECONDS = 3;</code> */ public static final int MILLISECONDS_VALUE = 3; /** * <code>SECONDS = 4;</code> */ public static final int SECONDS_VALUE = 4; /** * <code>MINUTES = 5;</code> */ public static final int MINUTES_VALUE = 5; /** * <code>HOURS = 6;</code> */ public static final int HOURS_VALUE = 6; /** * <code>DAYS = 7;</code> */ public static final int DAYS_VALUE = 7; public final int getNumber() { return value; } /** * @deprecated Use {@link #forNumber(int)} instead. 
*/ @java.lang.Deprecated public static TimeUnit valueOf(int value) { return forNumber(value); } public static TimeUnit forNumber(int value) { switch (value) { case 1: return NANOSECONDS; case 2: return MICROSECONDS; case 3: return MILLISECONDS; case 4: return SECONDS; case 5: return MINUTES; case 6: return HOURS; case 7: return DAYS; default: return null; } } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<TimeUnit> internalGetValueMap() { return internalValueMap; } private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap< TimeUnit> internalValueMap = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<TimeUnit>() { public TimeUnit findValueByNumber(int number) { return TimeUnit.forNumber(number); } }; public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); } public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor().getEnumTypes().get(1); } private static final TimeUnit[] VALUES = values(); public static TimeUnit valueOf( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } return VALUES[desc.getIndex()]; } private final int value; private TimeUnit(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:hbase.pb.TimeUnit) } public interface TableNameOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.TableName) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>required bytes namespace = 1;</code> */ boolean hasNamespace(); /** * <code>required bytes namespace = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getNamespace(); /** * <code>required bytes qualifier = 2;</code> */ boolean hasQualifier(); /** * <code>required bytes qualifier = 2;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getQualifier(); } /** * <pre> ** * Table Name * </pre> * * Protobuf type {@code hbase.pb.TableName} */ public static final class TableName extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.TableName) TableNameOrBuilder { // Use TableName.newBuilder() to construct. 
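  // Illustrative usage sketch (not part of the generated output): a TableName is
  // assembled through its Builder and can be round-tripped over the protobuf wire
  // format. The namespace/table values below are hypothetical examples.
  //
  //   org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString ns =
  //       org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8("default");
  //   org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString table =
  //       org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8("test_table");
  //   TableName name = TableName.newBuilder()
  //       .setNamespace(ns)      // required field 1
  //       .setQualifier(table)   // required field 2
  //       .build();              // build() fails if a required field is unset
  //   byte[] wire = name.toByteArray();
  //   TableName parsed = TableName.parseFrom(wire);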
private TableName(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private TableName() { namespace_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; qualifier_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private TableName( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; namespace_ = input.readBytes(); break; } case 18: { bitField0_ |= 0x00000002; qualifier_ = input.readBytes(); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableName_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableName_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder.class); } private int bitField0_; public static final int NAMESPACE_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString namespace_; /** * <code>required bytes namespace = 1;</code> */ public boolean hasNamespace() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes namespace = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getNamespace() { return namespace_; } public static final int QUALIFIER_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString qualifier_; /** * <code>required bytes qualifier = 2;</code> */ public boolean hasQualifier() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required bytes qualifier = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getQualifier() { return qualifier_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasNamespace()) { memoizedIsInitialized = 0; return false; } if 
(!hasQualifier()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, namespace_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, qualifier_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(1, namespace_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(2, qualifier_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName other = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName) obj; boolean result = true; result = result && (hasNamespace() == other.hasNamespace()); if (hasNamespace()) { result = result && getNamespace() .equals(other.getNamespace()); } result = result && (hasQualifier() == other.hasQualifier()); if (hasQualifier()) { result = result && getQualifier() .equals(other.getQualifier()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasNamespace()) { hash = (37 * hash) + NAMESPACE_FIELD_NUMBER; hash = (53 * hash) + getNamespace().hashCode(); } if (hasQualifier()) { hash = (37 * hash) + QUALIFIER_FIELD_NUMBER; hash = (53 * hash) + getQualifier().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> ** * Table Name * </pre> * * Protobuf type {@code hbase.pb.TableName} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.TableName) org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableName_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableName_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); namespace_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); qualifier_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableName_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName build() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName result = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.namespace_ = namespace_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.qualifier_ = qualifier_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) return this; if (other.hasNamespace()) { setNamespace(other.getNamespace()); } if (other.hasQualifier()) { setQualifier(other.getQualifier()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasNamespace()) { return false; } if (!hasQualifier()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString namespace_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes namespace = 1;</code> */ public boolean hasNamespace() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes namespace = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getNamespace() { return namespace_; } /** * <code>required bytes namespace = 1;</code> */ public Builder setNamespace(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; namespace_ = value; onChanged(); return this; } /** * <code>required bytes namespace = 1;</code> */ public Builder clearNamespace() { bitField0_ = (bitField0_ & ~0x00000001); namespace_ = getDefaultInstance().getNamespace(); onChanged(); return this; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString qualifier_ = 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes qualifier = 2;</code> */ public boolean hasQualifier() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required bytes qualifier = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getQualifier() { return qualifier_; } /** * <code>required bytes qualifier = 2;</code> */ public Builder setQualifier(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; qualifier_ = value; onChanged(); return this; } /** * <code>required bytes qualifier = 2;</code> */ public Builder clearQualifier() { bitField0_ = (bitField0_ & ~0x00000002); qualifier_ = getDefaultInstance().getQualifier(); onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.TableName) } // @@protoc_insertion_point(class_scope:hbase.pb.TableName) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<TableName> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<TableName>() { public TableName parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new TableName(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<TableName> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<TableName> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface TableSchemaOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.TableSchema) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>optional .hbase.pb.TableName table_name = 1;</code> */ boolean hasTableName(); /** * <code>optional .hbase.pb.TableName table_name = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName(); /** * <code>optional .hbase.pb.TableName table_name = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair> getAttributesList(); /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair getAttributes(int index); /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ int getAttributesCount(); /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> getAttributesOrBuilderList(); /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getAttributesOrBuilder( int index); /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema> getColumnFamiliesList(); /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies(int index); /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ int getColumnFamiliesCount(); /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> getColumnFamiliesOrBuilderList(); /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder( int index); /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList(); /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index); /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ int getConfigurationCount(); /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationOrBuilderList(); /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder( int index); } /** * <pre> ** * Table Schema * Inspired by the rest TableSchema * </pre> * * Protobuf type {@code hbase.pb.TableSchema} */ public static final class TableSchema extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.TableSchema) TableSchemaOrBuilder { // Use TableSchema.newBuilder() to construct. 
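  // Illustrative usage sketch (not part of the generated output): TableSchema nests
  // an optional TableName plus repeated attribute, column-family, and configuration
  // messages. The add*/set* builder methods below follow the standard protobuf-java
  // codegen naming for these fields; the literal values and the ColumnFamilySchema/
  // NameStringPair field names are assumptions for illustration only. ByteString
  // refers to the shaded com.google.protobuf.ByteString.
  //
  //   TableSchema schema = TableSchema.newBuilder()
  //       .setTableName(TableName.newBuilder()
  //           .setNamespace(ByteString.copyFromUtf8("default"))
  //           .setQualifier(ByteString.copyFromUtf8("test_table")))
  //       .addColumnFamilies(ColumnFamilySchema.newBuilder()
  //           .setName(ByteString.copyFromUtf8("cf1")))
  //       .addConfiguration(NameStringPair.newBuilder()
  //           .setName("SPLIT_POLICY")
  //           .setValue("org.example.CustomSplitPolicy"))
  //       .build();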
private TableSchema(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private TableSchema() { attributes_ = java.util.Collections.emptyList(); columnFamilies_ = java.util.Collections.emptyList(); configuration_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private TableSchema( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = tableName_.toBuilder(); } tableName_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(tableName_); tableName_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 18: { if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { attributes_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair>(); mutable_bitField0_ |= 0x00000002; } attributes_.add( input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.PARSER, extensionRegistry)); break; } case 26: { if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { columnFamilies_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema>(); mutable_bitField0_ |= 0x00000004; } columnFamilies_.add( input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.PARSER, extensionRegistry)); break; } case 34: { if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) { configuration_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair>(); mutable_bitField0_ |= 0x00000008; } configuration_.add( input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.PARSER, extensionRegistry)); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { attributes_ = java.util.Collections.unmodifiableList(attributes_); } if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { columnFamilies_ = java.util.Collections.unmodifiableList(columnFamilies_); } if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) { configuration_ = java.util.Collections.unmodifiableList(configuration_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } 
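  // Note on the tag constants handled in the parsing constructor above: a protobuf
  // tag is (field_number << 3) | wire_type, and wire type 2 marks a length-delimited
  // value (bytes, strings, nested messages). Hence for this message:
  //   table_name      = field 1 -> (1 << 3) | 2 = 10
  //   attributes      = field 2 -> (2 << 3) | 2 = 18
  //   column_families = field 3 -> (3 << 3) | 2 = 26
  //   configuration   = field 4 -> (4 << 3) | 2 = 34
  // Tag 0 terminates the stream, and any unrecognized tag is routed to the unknown
  // field set.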
public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableSchema_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableSchema_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder.class); } private int bitField0_; public static final int TABLE_NAME_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_; /** * <code>optional .hbase.pb.TableName table_name = 1;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional .hbase.pb.TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } /** * <code>optional .hbase.pb.TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } public static final int ATTRIBUTES_FIELD_NUMBER = 2; private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair> attributes_; /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair> getAttributesList() { return attributes_; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> getAttributesOrBuilderList() { return attributes_; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public int getAttributesCount() { return attributes_.size(); } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair getAttributes(int index) { return attributes_.get(index); } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getAttributesOrBuilder( int index) { return attributes_.get(index); } public static final int COLUMN_FAMILIES_FIELD_NUMBER = 3; private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema> columnFamilies_; /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema> getColumnFamiliesList() { return columnFamilies_; } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> getColumnFamiliesOrBuilderList() { return columnFamilies_; } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public int getColumnFamiliesCount() { return columnFamilies_.size(); } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies(int index) { return columnFamilies_.get(index); } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder( int index) { return columnFamilies_.get(index); } public static final int CONFIGURATION_FIELD_NUMBER = 4; private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair> configuration_; /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList() { return configuration_; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationOrBuilderList() { return configuration_; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public int getConfigurationCount() { return configuration_.size(); } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index) { return configuration_.get(index); } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder( int index) { return configuration_.get(index); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (hasTableName()) { if (!getTableName().isInitialized()) { memoizedIsInitialized = 0; return false; } } for (int i = 0; i < getAttributesCount(); i++) { if (!getAttributes(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } for (int i = 0; i < getColumnFamiliesCount(); i++) { if (!getColumnFamilies(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } for (int i = 0; i < getConfigurationCount(); i++) { if (!getConfiguration(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, getTableName()); } for (int i = 0; i < attributes_.size(); i++) { output.writeMessage(2, attributes_.get(i)); } for (int i = 0; i < columnFamilies_.size(); i++) { output.writeMessage(3, columnFamilies_.get(i)); } for (int i = 0; i < configuration_.size(); i++) { output.writeMessage(4, configuration_.get(i)); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += 
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(1, getTableName()); } for (int i = 0; i < attributes_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(2, attributes_.get(i)); } for (int i = 0; i < columnFamilies_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(3, columnFamilies_.get(i)); } for (int i = 0; i < configuration_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(4, configuration_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema other = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema) obj; boolean result = true; result = result && (hasTableName() == other.hasTableName()); if (hasTableName()) { result = result && getTableName() .equals(other.getTableName()); } result = result && getAttributesList() .equals(other.getAttributesList()); result = result && getColumnFamiliesList() .equals(other.getColumnFamiliesList()); result = result && getConfigurationList() .equals(other.getConfigurationList()); result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasTableName()) { hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER; hash = (53 * hash) + getTableName().hashCode(); } if (getAttributesCount() > 0) { hash = (37 * hash) + ATTRIBUTES_FIELD_NUMBER; hash = (53 * hash) + getAttributesList().hashCode(); } if (getColumnFamiliesCount() > 0) { hash = (37 * hash) + COLUMN_FAMILIES_FIELD_NUMBER; hash = (53 * hash) + getColumnFamiliesList().hashCode(); } if (getConfigurationCount() > 0) { hash = (37 * hash) + CONFIGURATION_FIELD_NUMBER; hash = (53 * hash) + getConfigurationList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> ** * Table Schema * Inspired by the rest TableSchema * </pre> * * Protobuf type {@code hbase.pb.TableSchema} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.TableSchema) org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableSchema_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableSchema_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getTableNameFieldBuilder(); getAttributesFieldBuilder(); getColumnFamiliesFieldBuilder(); getConfigurationFieldBuilder(); } } public Builder clear() { super.clear(); if (tableNameBuilder_ == null) { tableName_ = null; } else { tableNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (attributesBuilder_ == null) { attributes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); } else { attributesBuilder_.clear(); } if (columnFamiliesBuilder_ == null) { columnFamilies_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); } else { columnFamiliesBuilder_.clear(); } if (configurationBuilder_ == null) { configuration_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000008); } else { configurationBuilder_.clear(); } return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableSchema_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema build() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema buildPartial() { 
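        // buildPartial() copies the builder state into a new message without the
        // required-field check; build() above performs that check by testing
        // isInitialized() on the result. Repeated fields held directly by this
        // builder are frozen into unmodifiable lists so the returned message is
        // effectively immutable.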
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema result = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (tableNameBuilder_ == null) { result.tableName_ = tableName_; } else { result.tableName_ = tableNameBuilder_.build(); } if (attributesBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002)) { attributes_ = java.util.Collections.unmodifiableList(attributes_); bitField0_ = (bitField0_ & ~0x00000002); } result.attributes_ = attributes_; } else { result.attributes_ = attributesBuilder_.build(); } if (columnFamiliesBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004)) { columnFamilies_ = java.util.Collections.unmodifiableList(columnFamilies_); bitField0_ = (bitField0_ & ~0x00000004); } result.columnFamilies_ = columnFamilies_; } else { result.columnFamilies_ = columnFamiliesBuilder_.build(); } if (configurationBuilder_ == null) { if (((bitField0_ & 0x00000008) == 0x00000008)) { configuration_ = java.util.Collections.unmodifiableList(configuration_); bitField0_ = (bitField0_ & ~0x00000008); } result.configuration_ = configuration_; } else { result.configuration_ = configurationBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()) return this; if (other.hasTableName()) { mergeTableName(other.getTableName()); } if (attributesBuilder_ == null) { if (!other.attributes_.isEmpty()) { if (attributes_.isEmpty()) { attributes_ = other.attributes_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureAttributesIsMutable(); attributes_.addAll(other.attributes_); } onChanged(); } } else { if (!other.attributes_.isEmpty()) { if (attributesBuilder_.isEmpty()) { attributesBuilder_.dispose(); attributesBuilder_ = null; attributes_ = other.attributes_; bitField0_ = (bitField0_ & ~0x00000002); attributesBuilder_ = 
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getAttributesFieldBuilder() : null; } else { attributesBuilder_.addAllMessages(other.attributes_); } } } if (columnFamiliesBuilder_ == null) { if (!other.columnFamilies_.isEmpty()) { if (columnFamilies_.isEmpty()) { columnFamilies_ = other.columnFamilies_; bitField0_ = (bitField0_ & ~0x00000004); } else { ensureColumnFamiliesIsMutable(); columnFamilies_.addAll(other.columnFamilies_); } onChanged(); } } else { if (!other.columnFamilies_.isEmpty()) { if (columnFamiliesBuilder_.isEmpty()) { columnFamiliesBuilder_.dispose(); columnFamiliesBuilder_ = null; columnFamilies_ = other.columnFamilies_; bitField0_ = (bitField0_ & ~0x00000004); columnFamiliesBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getColumnFamiliesFieldBuilder() : null; } else { columnFamiliesBuilder_.addAllMessages(other.columnFamilies_); } } } if (configurationBuilder_ == null) { if (!other.configuration_.isEmpty()) { if (configuration_.isEmpty()) { configuration_ = other.configuration_; bitField0_ = (bitField0_ & ~0x00000008); } else { ensureConfigurationIsMutable(); configuration_.addAll(other.configuration_); } onChanged(); } } else { if (!other.configuration_.isEmpty()) { if (configurationBuilder_.isEmpty()) { configurationBuilder_.dispose(); configurationBuilder_ = null; configuration_ = other.configuration_; bitField0_ = (bitField0_ & ~0x00000008); configurationBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getConfigurationFieldBuilder() : null; } else { configurationBuilder_.addAllMessages(other.configuration_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (hasTableName()) { if (!getTableName().isInitialized()) { return false; } } for (int i = 0; i < getAttributesCount(); i++) { if (!getAttributes(i).isInitialized()) { return false; } } for (int i = 0; i < getColumnFamiliesCount(); i++) { if (!getColumnFamilies(i).isInitialized()) { return false; } } for (int i = 0; i < getConfigurationCount(); i++) { if (!getConfiguration(i).isInitialized()) { return false; } } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * <code>optional .hbase.pb.TableName table_name = 1;</code> */ public boolean hasTableName() { 
return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional .hbase.pb.TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { if (tableNameBuilder_ == null) { return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } else { return tableNameBuilder_.getMessage(); } } /** * <code>optional .hbase.pb.TableName table_name = 1;</code> */ public Builder setTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (value == null) { throw new NullPointerException(); } tableName_ = value; onChanged(); } else { tableNameBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>optional .hbase.pb.TableName table_name = 1;</code> */ public Builder setTableName( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) { if (tableNameBuilder_ == null) { tableName_ = builderForValue.build(); onChanged(); } else { tableNameBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>optional .hbase.pb.TableName table_name = 1;</code> */ public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && tableName_ != null && tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); } else { tableName_ = value; } onChanged(); } else { tableNameBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>optional .hbase.pb.TableName table_name = 1;</code> */ public Builder clearTableName() { if (tableNameBuilder_ == null) { tableName_ = null; onChanged(); } else { tableNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>optional .hbase.pb.TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() { bitField0_ |= 0x00000001; onChanged(); return getTableNameFieldBuilder().getBuilder(); } /** * <code>optional .hbase.pb.TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { if (tableNameBuilder_ != null) { return tableNameBuilder_.getMessageOrBuilder(); } else { return tableName_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } } /** * <code>optional .hbase.pb.TableName table_name = 1;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { tableNameBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>( getTableName(), getParentForChildren(), isClean()); tableName_ = null; } return tableNameBuilder_; } private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair> attributes_ = java.util.Collections.emptyList(); private void ensureAttributesIsMutable() { if (!((bitField0_ & 0x00000002) == 0x00000002)) { attributes_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair>(attributes_); bitField0_ |= 0x00000002; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> attributesBuilder_; /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair> getAttributesList() { if (attributesBuilder_ == null) { return java.util.Collections.unmodifiableList(attributes_); } else { return attributesBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public int getAttributesCount() { if (attributesBuilder_ == null) { return attributes_.size(); } else { return attributesBuilder_.getCount(); } } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair getAttributes(int index) { if (attributesBuilder_ == null) { return attributes_.get(index); } else { return attributesBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public Builder setAttributes( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair value) { if (attributesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAttributesIsMutable(); attributes_.set(index, value); onChanged(); } else { attributesBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public Builder setAttributes( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { if (attributesBuilder_ == null) { ensureAttributesIsMutable(); attributes_.set(index, builderForValue.build()); onChanged(); } else { attributesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ 
public Builder addAttributes(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair value) { if (attributesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAttributesIsMutable(); attributes_.add(value); onChanged(); } else { attributesBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public Builder addAttributes( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair value) { if (attributesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAttributesIsMutable(); attributes_.add(index, value); onChanged(); } else { attributesBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public Builder addAttributes( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { if (attributesBuilder_ == null) { ensureAttributesIsMutable(); attributes_.add(builderForValue.build()); onChanged(); } else { attributesBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public Builder addAttributes( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { if (attributesBuilder_ == null) { ensureAttributesIsMutable(); attributes_.add(index, builderForValue.build()); onChanged(); } else { attributesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public Builder addAllAttributes( java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair> values) { if (attributesBuilder_ == null) { ensureAttributesIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, attributes_); onChanged(); } else { attributesBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public Builder clearAttributes() { if (attributesBuilder_ == null) { attributes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { attributesBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public Builder removeAttributes(int index) { if (attributesBuilder_ == null) { ensureAttributesIsMutable(); attributes_.remove(index); onChanged(); } else { attributesBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder getAttributesBuilder( int index) { return getAttributesFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getAttributesOrBuilder( int index) { if (attributesBuilder_ == null) { return attributes_.get(index); } else { return attributesBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> getAttributesOrBuilderList() { if (attributesBuilder_ != null) { return attributesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(attributes_); } } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addAttributesBuilder() { return getAttributesFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance()); } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addAttributesBuilder( int index) { return getAttributesFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance()); } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder> getAttributesBuilderList() { return getAttributesFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> getAttributesFieldBuilder() { if (attributesBuilder_ == null) { attributesBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>( attributes_, ((bitField0_ & 0x00000002) == 0x00000002), getParentForChildren(), isClean()); attributes_ = null; } return attributesBuilder_; } private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema> columnFamilies_ = java.util.Collections.emptyList(); private void ensureColumnFamiliesIsMutable() { if (!((bitField0_ & 0x00000004) == 0x00000004)) { columnFamilies_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema>(columnFamilies_); bitField0_ |= 0x00000004; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> columnFamiliesBuilder_; /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema> getColumnFamiliesList() { if (columnFamiliesBuilder_ == null) { return java.util.Collections.unmodifiableList(columnFamilies_); } else { return columnFamiliesBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public int getColumnFamiliesCount() { if (columnFamiliesBuilder_ == null) { return columnFamilies_.size(); } else { return columnFamiliesBuilder_.getCount(); } } /** 
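* <pre>
* A hedged usage sketch (illustrative only; "tn" stands for an already-built hbase.pb.TableName
* message and ByteString is the shaded com.google.protobuf.ByteString):
*
*   HBaseProtos.TableSchema schema = HBaseProtos.TableSchema.newBuilder()
*       .setTableName(tn)
*       .addColumnFamilies(HBaseProtos.ColumnFamilySchema.newBuilder()
*           .setName(ByteString.copyFromUtf8("cf1")))
*       .build();
*   HBaseProtos.ColumnFamilySchema cf = schema.getColumnFamilies(0);
* </pre>
*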
* <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies(int index) { if (columnFamiliesBuilder_ == null) { return columnFamilies_.get(index); } else { return columnFamiliesBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public Builder setColumnFamilies( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { if (columnFamiliesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureColumnFamiliesIsMutable(); columnFamilies_.set(index, value); onChanged(); } else { columnFamiliesBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public Builder setColumnFamilies( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder builderForValue) { if (columnFamiliesBuilder_ == null) { ensureColumnFamiliesIsMutable(); columnFamilies_.set(index, builderForValue.build()); onChanged(); } else { columnFamiliesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public Builder addColumnFamilies(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { if (columnFamiliesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureColumnFamiliesIsMutable(); columnFamilies_.add(value); onChanged(); } else { columnFamiliesBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public Builder addColumnFamilies( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { if (columnFamiliesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureColumnFamiliesIsMutable(); columnFamilies_.add(index, value); onChanged(); } else { columnFamiliesBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public Builder addColumnFamilies( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder builderForValue) { if (columnFamiliesBuilder_ == null) { ensureColumnFamiliesIsMutable(); columnFamilies_.add(builderForValue.build()); onChanged(); } else { columnFamiliesBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public Builder addColumnFamilies( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder builderForValue) { if (columnFamiliesBuilder_ == null) { ensureColumnFamiliesIsMutable(); columnFamilies_.add(index, builderForValue.build()); onChanged(); } else { columnFamiliesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public Builder addAllColumnFamilies( java.lang.Iterable<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema> values) { if (columnFamiliesBuilder_ == null) { ensureColumnFamiliesIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, columnFamilies_); onChanged(); } else { columnFamiliesBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public Builder clearColumnFamilies() { if (columnFamiliesBuilder_ == null) { columnFamilies_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); } else { columnFamiliesBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public Builder removeColumnFamilies(int index) { if (columnFamiliesBuilder_ == null) { ensureColumnFamiliesIsMutable(); columnFamilies_.remove(index); onChanged(); } else { columnFamiliesBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder getColumnFamiliesBuilder( int index) { return getColumnFamiliesFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder( int index) { if (columnFamiliesBuilder_ == null) { return columnFamilies_.get(index); } else { return columnFamiliesBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> getColumnFamiliesOrBuilderList() { if (columnFamiliesBuilder_ != null) { return columnFamiliesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(columnFamilies_); } } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder addColumnFamiliesBuilder() { return getColumnFamiliesFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance()); } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder addColumnFamiliesBuilder( int index) { return getColumnFamiliesFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance()); } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder> getColumnFamiliesBuilderList() { return getColumnFamiliesFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> getColumnFamiliesFieldBuilder() { if (columnFamiliesBuilder_ == null) { columnFamiliesBuilder_ = new 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder>( columnFamilies_, ((bitField0_ & 0x00000004) == 0x00000004), getParentForChildren(), isClean()); columnFamilies_ = null; } return columnFamiliesBuilder_; } private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair> configuration_ = java.util.Collections.emptyList(); private void ensureConfigurationIsMutable() { if (!((bitField0_ & 0x00000008) == 0x00000008)) { configuration_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair>(configuration_); bitField0_ |= 0x00000008; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> configurationBuilder_; /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList() { if (configurationBuilder_ == null) { return java.util.Collections.unmodifiableList(configuration_); } else { return configurationBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public int getConfigurationCount() { if (configurationBuilder_ == null) { return configuration_.size(); } else { return configurationBuilder_.getCount(); } } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index) { if (configurationBuilder_ == null) { return configuration_.get(index); } else { return configurationBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public Builder setConfiguration( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair value) { if (configurationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureConfigurationIsMutable(); configuration_.set(index, value); onChanged(); } else { configurationBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public Builder setConfiguration( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); configuration_.set(index, builderForValue.build()); onChanged(); } else { configurationBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public Builder addConfiguration(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair value) { if (configurationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureConfigurationIsMutable(); configuration_.add(value); onChanged(); } else { configurationBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.NameStringPair 
configuration = 4;</code> */ public Builder addConfiguration( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair value) { if (configurationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureConfigurationIsMutable(); configuration_.add(index, value); onChanged(); } else { configurationBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public Builder addConfiguration( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); configuration_.add(builderForValue.build()); onChanged(); } else { configurationBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public Builder addConfiguration( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); configuration_.add(index, builderForValue.build()); onChanged(); } else { configurationBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public Builder addAllConfiguration( java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair> values) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, configuration_); onChanged(); } else { configurationBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public Builder clearConfiguration() { if (configurationBuilder_ == null) { configuration_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); } else { configurationBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public Builder removeConfiguration(int index) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); configuration_.remove(index); onChanged(); } else { configurationBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder getConfigurationBuilder( int index) { return getConfigurationFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder( int index) { if (configurationBuilder_ == null) { return configuration_.get(index); } else { return configurationBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationOrBuilderList() { if (configurationBuilder_ != null) { return configurationBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(configuration_); } } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder addConfigurationBuilder() { return getConfigurationFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()); } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder addConfigurationBuilder( int index) { return getConfigurationFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()); } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder> getConfigurationBuilderList() { return getConfigurationFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationFieldBuilder() { if (configurationBuilder_ == null) { configurationBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>( configuration_, ((bitField0_ & 0x00000008) == 0x00000008), getParentForChildren(), isClean()); configuration_ = null; } return configurationBuilder_; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.TableSchema) } // @@protoc_insertion_point(class_scope:hbase.pb.TableSchema) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<TableSchema> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<TableSchema>() { public TableSchema parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return 
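// Parsing delegates to TableSchema's private CodedInputStream constructor, the same tag-by-tag
// pattern shown below for TableState and ColumnFamilySchema.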
new TableSchema(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<TableSchema> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<TableSchema> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface TableStateOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.TableState) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <pre> * This is the table's state. * </pre> * * <code>required .hbase.pb.TableState.State state = 1;</code> */ boolean hasState(); /** * <pre> * This is the table's state. * </pre> * * <code>required .hbase.pb.TableState.State state = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State getState(); } /** * <pre> ** Denotes state of the table * </pre> * * Protobuf type {@code hbase.pb.TableState} */ public static final class TableState extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.TableState) TableStateOrBuilder { // Use TableState.newBuilder() to construct. private TableState(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private TableState() { state_ = 0; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private TableState( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { int rawValue = input.readEnum(); org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State value = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(1, rawValue); } else { bitField0_ |= 0x00000001; state_ = rawValue; } break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableState_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableState_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.Builder.class); } /** * <pre> * Table's current state * </pre> * * Protobuf enum {@code hbase.pb.TableState.State} */ public enum State implements org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolMessageEnum { /** * <code>ENABLED = 0;</code> */ ENABLED(0), /** * <code>DISABLED = 1;</code> */ DISABLED(1), /** * <code>DISABLING = 2;</code> */ DISABLING(2), /** * <code>ENABLING = 3;</code> */ ENABLING(3), ; /** * <code>ENABLED = 0;</code> */ public static final int ENABLED_VALUE = 0; /** * <code>DISABLED = 1;</code> */ public static final int DISABLED_VALUE = 1; /** * <code>DISABLING = 2;</code> */ public static final int DISABLING_VALUE = 2; /** * <code>ENABLING = 3;</code> */ public static final int ENABLING_VALUE = 3; public final int getNumber() { return value; } /** * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static State valueOf(int value) { return forNumber(value); } public static State forNumber(int value) { switch (value) { case 0: return ENABLED; case 1: return DISABLED; case 2: return DISABLING; case 3: return ENABLING; default: return null; } } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<State> internalGetValueMap() { return internalValueMap; } private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap< State> internalValueMap = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<State>() { public State findValueByNumber(int number) { return State.forNumber(number); } }; public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); } public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.getDescriptor().getEnumTypes().get(0); } private static final State[] VALUES = values(); public static State valueOf( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } return VALUES[desc.getIndex()]; } private final int value; private State(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:hbase.pb.TableState.State) } private int bitField0_; public static final int STATE_FIELD_NUMBER = 1; private int state_; /** * <pre> * This is the table's state. * </pre> * * <code>required .hbase.pb.TableState.State state = 1;</code> */ public boolean hasState() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * This is the table's state. 
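* A hedged usage sketch (stateBytes is an assumed serialized TableState, shown for illustration):
*
*   HBaseProtos.TableState ts = HBaseProtos.TableState.parseFrom(stateBytes);
*   HBaseProtos.TableState.State s = ts.getState();   // reports ENABLED when the field is unset
*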
* </pre> * * <code>required .hbase.pb.TableState.State state = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State getState() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State result = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State.valueOf(state_); return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State.ENABLED : result; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasState()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeEnum(1, state_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeEnumSize(1, state_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState other = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState) obj; boolean result = true; result = result && (hasState() == other.hasState()); if (hasState()) { result = result && state_ == other.state_; } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasState()) { hash = (37 * hash) + STATE_FIELD_NUMBER; hash = (53 * hash) + state_; } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> ** Denotes state of the table * </pre> * * Protobuf type {@code hbase.pb.TableState} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.TableState) org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableStateOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableState_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableState_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); state_ = 0; bitField0_ = (bitField0_ & ~0x00000001); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableState_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState build() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState result = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.state_ = state_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder 
clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.getDefaultInstance()) return this; if (other.hasState()) { setState(other.getState()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasState()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private int state_ = 0; /** * <pre> * This is the table's state. * </pre> * * <code>required .hbase.pb.TableState.State state = 1;</code> */ public boolean hasState() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * This is the table's state. * </pre> * * <code>required .hbase.pb.TableState.State state = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State getState() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State result = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State.valueOf(state_); return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State.ENABLED : result; } /** * <pre> * This is the table's state. * </pre> * * <code>required .hbase.pb.TableState.State state = 1;</code> */ public Builder setState(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; state_ = value.getNumber(); onChanged(); return this; } /** * <pre> * This is the table's state. 
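* Because state is a required field, clearState() leaves the builder uninitialized:
* build() will throw until setState(...) is called again, while buildPartial() still
* succeeds and the resulting message's getState() reports the default, ENABLED.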
* </pre> * * <code>required .hbase.pb.TableState.State state = 1;</code> */ public Builder clearState() { bitField0_ = (bitField0_ & ~0x00000001); state_ = 0; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.TableState) } // @@protoc_insertion_point(class_scope:hbase.pb.TableState) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<TableState> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<TableState>() { public TableState parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new TableState(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<TableState> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<TableState> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ColumnFamilySchemaOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.ColumnFamilySchema) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>required bytes name = 1;</code> */ boolean hasName(); /** * <code>required bytes name = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getName(); /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair> getAttributesList(); /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair getAttributes(int index); /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ int getAttributesCount(); /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> getAttributesOrBuilderList(); /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getAttributesOrBuilder( int index); /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList(); /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index); /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ int getConfigurationCount(); /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationOrBuilderList(); /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder( int index); } /** * <pre> ** * Column Family Schema * Inspired by the REST ColumnSchemaMessage * </pre> * * Protobuf type {@code hbase.pb.ColumnFamilySchema} */ public static final class ColumnFamilySchema extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.ColumnFamilySchema) ColumnFamilySchemaOrBuilder { // Use ColumnFamilySchema.newBuilder() to construct. private ColumnFamilySchema(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ColumnFamilySchema() { name_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; attributes_ = java.util.Collections.emptyList(); configuration_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ColumnFamilySchema( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; name_ = input.readBytes(); break; } case 18: { if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { attributes_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair>(); mutable_bitField0_ |= 0x00000002; } attributes_.add( input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.PARSER, extensionRegistry)); break; } case 26: { if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { configuration_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair>(); mutable_bitField0_ |= 0x00000004; } 
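// Wire tag 26 = (field number 3 << 3) | wire type 2 (length-delimited): each occurrence of the
// repeated "configuration" field is decoded as an embedded NameStringPair and appended.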
configuration_.add( input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.PARSER, extensionRegistry)); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { attributes_ = java.util.Collections.unmodifiableList(attributes_); } if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { configuration_ = java.util.Collections.unmodifiableList(configuration_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ColumnFamilySchema_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ColumnFamilySchema_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder.class); } private int bitField0_; public static final int NAME_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString name_; /** * <code>required bytes name = 1;</code> */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes name = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getName() { return name_; } public static final int ATTRIBUTES_FIELD_NUMBER = 2; private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair> attributes_; /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair> getAttributesList() { return attributes_; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> getAttributesOrBuilderList() { return attributes_; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public int getAttributesCount() { return attributes_.size(); } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair getAttributes(int index) { return attributes_.get(index); } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getAttributesOrBuilder( int index) { return attributes_.get(index); } public static final int CONFIGURATION_FIELD_NUMBER = 3; private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair> configuration_; /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList() { return configuration_; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationOrBuilderList() { return configuration_; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public int getConfigurationCount() { return configuration_.size(); } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index) { return configuration_.get(index); } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder( int index) { return configuration_.get(index); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasName()) { memoizedIsInitialized = 0; return false; } for (int i = 0; i < getAttributesCount(); i++) { if (!getAttributes(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } for (int i = 0; i < getConfigurationCount(); i++) { if (!getConfiguration(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, name_); } for (int i = 0; i < attributes_.size(); i++) { output.writeMessage(2, attributes_.get(i)); } for (int i = 0; i < configuration_.size(); i++) { output.writeMessage(3, configuration_.get(i)); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(1, name_); } for (int i = 0; i < attributes_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(2, attributes_.get(i)); } for (int i = 0; i < configuration_.size(); i++) { size += 
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(3, configuration_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema other = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema) obj; boolean result = true; result = result && (hasName() == other.hasName()); if (hasName()) { result = result && getName() .equals(other.getName()); } result = result && getAttributesList() .equals(other.getAttributesList()); result = result && getConfigurationList() .equals(other.getConfigurationList()); result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasName()) { hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); } if (getAttributesCount() > 0) { hash = (37 * hash) + ATTRIBUTES_FIELD_NUMBER; hash = (53 * hash) + getAttributesList().hashCode(); } if (getConfigurationCount() > 0) { hash = (37 * hash) + CONFIGURATION_FIELD_NUMBER; hash = (53 * hash) + getConfigurationList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> ** * Column Family Schema * Inspired by the rest ColumSchemaMessage * </pre> * * Protobuf type {@code hbase.pb.ColumnFamilySchema} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.ColumnFamilySchema) org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ColumnFamilySchema_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ColumnFamilySchema_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getAttributesFieldBuilder(); getConfigurationFieldBuilder(); } } public Builder clear() { super.clear(); name_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); if (attributesBuilder_ == null) { attributes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); } else { attributesBuilder_.clear(); } if (configurationBuilder_ == null) { configuration_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); } else { configurationBuilder_.clear(); } return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ColumnFamilySchema_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema build() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema result = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema(this); int 
from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.name_ = name_; if (attributesBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002)) { attributes_ = java.util.Collections.unmodifiableList(attributes_); bitField0_ = (bitField0_ & ~0x00000002); } result.attributes_ = attributes_; } else { result.attributes_ = attributesBuilder_.build(); } if (configurationBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004)) { configuration_ = java.util.Collections.unmodifiableList(configuration_); bitField0_ = (bitField0_ & ~0x00000004); } result.configuration_ = configuration_; } else { result.configuration_ = configurationBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance()) return this; if (other.hasName()) { setName(other.getName()); } if (attributesBuilder_ == null) { if (!other.attributes_.isEmpty()) { if (attributes_.isEmpty()) { attributes_ = other.attributes_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureAttributesIsMutable(); attributes_.addAll(other.attributes_); } onChanged(); } } else { if (!other.attributes_.isEmpty()) { if (attributesBuilder_.isEmpty()) { attributesBuilder_.dispose(); attributesBuilder_ = null; attributes_ = other.attributes_; bitField0_ = (bitField0_ & ~0x00000002); attributesBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getAttributesFieldBuilder() : null; } else { attributesBuilder_.addAllMessages(other.attributes_); } } } if (configurationBuilder_ == null) { if (!other.configuration_.isEmpty()) { if (configuration_.isEmpty()) { configuration_ = other.configuration_; bitField0_ = (bitField0_ & ~0x00000004); } else { ensureConfigurationIsMutable(); configuration_.addAll(other.configuration_); } onChanged(); } } else { if (!other.configuration_.isEmpty()) { if (configurationBuilder_.isEmpty()) { configurationBuilder_.dispose(); configurationBuilder_ = null; configuration_ = other.configuration_; bitField0_ = (bitField0_ & ~0x00000004); configurationBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getConfigurationFieldBuilder() : null; } else { configurationBuilder_.addAllMessages(other.configuration_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasName()) { return false; } for (int i = 0; i < getAttributesCount(); i++) { if (!getAttributes(i).isInitialized()) { return false; } } for (int i = 0; i < getConfigurationCount(); i++) { if (!getConfiguration(i).isInitialized()) { return false; } } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString name_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes name = 1;</code> */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes name = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getName() { return name_; } /** * <code>required bytes name = 1;</code> */ public Builder setName(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; name_ = value; onChanged(); return this; } /** * <code>required bytes name = 1;</code> */ public Builder clearName() { bitField0_ = (bitField0_ & ~0x00000001); name_ = getDefaultInstance().getName(); onChanged(); return this; } private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair> attributes_ = java.util.Collections.emptyList(); private void ensureAttributesIsMutable() { if (!((bitField0_ & 0x00000002) == 0x00000002)) { attributes_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair>(attributes_); bitField0_ |= 0x00000002; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> attributesBuilder_; /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair> getAttributesList() { if (attributesBuilder_ == null) { return java.util.Collections.unmodifiableList(attributes_); } else { return attributesBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public int getAttributesCount() { if (attributesBuilder_ == null) { return attributes_.size(); } else { return attributesBuilder_.getCount(); } } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair getAttributes(int index) { if (attributesBuilder_ == null) { return attributes_.get(index); } else { return attributesBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public Builder setAttributes( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair value) { if (attributesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAttributesIsMutable(); attributes_.set(index, value); onChanged(); } else { attributesBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public Builder setAttributes( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { if (attributesBuilder_ == null) { ensureAttributesIsMutable(); attributes_.set(index, builderForValue.build()); onChanged(); } else { attributesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public Builder addAttributes(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair value) { if (attributesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAttributesIsMutable(); attributes_.add(value); onChanged(); } else { attributesBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public Builder addAttributes( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair value) { if (attributesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAttributesIsMutable(); attributes_.add(index, value); onChanged(); } else { attributesBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public Builder addAttributes( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { if (attributesBuilder_ == null) { ensureAttributesIsMutable(); attributes_.add(builderForValue.build()); onChanged(); } else { attributesBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public Builder addAttributes( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { if (attributesBuilder_ == null) { ensureAttributesIsMutable(); attributes_.add(index, builderForValue.build()); onChanged(); } else { attributesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated 
.hbase.pb.BytesBytesPair attributes = 2;</code> */ public Builder addAllAttributes( java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair> values) { if (attributesBuilder_ == null) { ensureAttributesIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, attributes_); onChanged(); } else { attributesBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public Builder clearAttributes() { if (attributesBuilder_ == null) { attributes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { attributesBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public Builder removeAttributes(int index) { if (attributesBuilder_ == null) { ensureAttributesIsMutable(); attributes_.remove(index); onChanged(); } else { attributesBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder getAttributesBuilder( int index) { return getAttributesFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getAttributesOrBuilder( int index) { if (attributesBuilder_ == null) { return attributes_.get(index); } else { return attributesBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> getAttributesOrBuilderList() { if (attributesBuilder_ != null) { return attributesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(attributes_); } } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addAttributesBuilder() { return getAttributesFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance()); } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addAttributesBuilder( int index) { return getAttributesFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance()); } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder> getAttributesBuilderList() { return getAttributesFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> getAttributesFieldBuilder() { if (attributesBuilder_ == null) { attributesBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>( attributes_, ((bitField0_ & 0x00000002) == 0x00000002), getParentForChildren(), isClean()); attributes_ = null; } return attributesBuilder_; } private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair> configuration_ = java.util.Collections.emptyList(); private void ensureConfigurationIsMutable() { if (!((bitField0_ & 0x00000004) == 0x00000004)) { configuration_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair>(configuration_); bitField0_ |= 0x00000004; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> configurationBuilder_; /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList() { if (configurationBuilder_ == null) { return java.util.Collections.unmodifiableList(configuration_); } else { return configurationBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public int getConfigurationCount() { if (configurationBuilder_ == null) { return configuration_.size(); } else { return configurationBuilder_.getCount(); } } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index) { if (configurationBuilder_ == null) { return configuration_.get(index); } else { return configurationBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public Builder setConfiguration( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair value) { if (configurationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureConfigurationIsMutable(); configuration_.set(index, value); onChanged(); } else { configurationBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public Builder setConfiguration( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); configuration_.set(index, builderForValue.build()); onChanged(); } else { configurationBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public Builder addConfiguration(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair value) { if (configurationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureConfigurationIsMutable(); configuration_.add(value); onChanged(); } else { configurationBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public Builder addConfiguration( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair value) { if 
(configurationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureConfigurationIsMutable(); configuration_.add(index, value); onChanged(); } else { configurationBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public Builder addConfiguration( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); configuration_.add(builderForValue.build()); onChanged(); } else { configurationBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public Builder addConfiguration( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); configuration_.add(index, builderForValue.build()); onChanged(); } else { configurationBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public Builder addAllConfiguration( java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair> values) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, configuration_); onChanged(); } else { configurationBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public Builder clearConfiguration() { if (configurationBuilder_ == null) { configuration_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); } else { configurationBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public Builder removeConfiguration(int index) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); configuration_.remove(index); onChanged(); } else { configurationBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder getConfigurationBuilder( int index) { return getConfigurationFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder( int index) { if (configurationBuilder_ == null) { return configuration_.get(index); } else { return configurationBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationOrBuilderList() { if (configurationBuilder_ != null) { return configurationBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(configuration_); } } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder addConfigurationBuilder() { return getConfigurationFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()); } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder addConfigurationBuilder( int index) { return getConfigurationFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()); } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder> getConfigurationBuilderList() { return getConfigurationFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationFieldBuilder() { if (configurationBuilder_ == null) { configurationBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>( configuration_, ((bitField0_ & 0x00000004) == 0x00000004), getParentForChildren(), isClean()); configuration_ = null; } return configurationBuilder_; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.ColumnFamilySchema) } // @@protoc_insertion_point(class_scope:hbase.pb.ColumnFamilySchema) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ColumnFamilySchema> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<ColumnFamilySchema>() { public ColumnFamilySchema parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new ColumnFamilySchema(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ColumnFamilySchema> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ColumnFamilySchema> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface RegionInfoOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.RegionInfo) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>required uint64 region_id = 1;</code> */ boolean hasRegionId(); /** * <code>required uint64 region_id = 1;</code> */ long getRegionId(); /** * <code>required .hbase.pb.TableName table_name = 2;</code> */ boolean hasTableName(); /** * <code>required .hbase.pb.TableName table_name = 2;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName(); /** * <code>required .hbase.pb.TableName table_name = 2;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); /** * <code>optional bytes start_key = 3;</code> */ boolean hasStartKey(); /** * <code>optional bytes start_key = 3;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getStartKey(); /** * <code>optional bytes end_key = 4;</code> */ boolean hasEndKey(); /** * <code>optional bytes end_key = 4;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getEndKey(); /** * <code>optional bool offline = 5;</code> */ boolean hasOffline(); /** * <code>optional bool offline = 5;</code> */ boolean getOffline(); /** * <code>optional bool split = 6;</code> */ boolean hasSplit(); /** * <code>optional bool split = 6;</code> */ boolean getSplit(); /** * <code>optional int32 replica_id = 7 [default = 0];</code> */ boolean hasReplicaId(); /** * <code>optional int32 replica_id = 7 [default = 0];</code> */ int getReplicaId(); } /** * <pre> ** * Protocol buffer version of HRegionInfo. * </pre> * * Protobuf type {@code hbase.pb.RegionInfo} */ public static final class RegionInfo extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.RegionInfo) RegionInfoOrBuilder { // Use RegionInfo.newBuilder() to construct. 
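    /*
     * Illustrative sketch only, not emitted by protoc: client code assembles these messages through the
     * generated builders. Assuming `tableName` is a previously built hbase.pb.TableName message and
     * ByteString is the shaded org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString:
     *
     *   RegionInfo info = RegionInfo.newBuilder()
     *       .setRegionId(1L)                 // required uint64 region_id = 1
     *       .setTableName(tableName)         // required .hbase.pb.TableName table_name = 2
     *       .setStartKey(ByteString.EMPTY)   // optional bytes start_key = 3
     *       .setEndKey(ByteString.EMPTY)     // optional bytes end_key = 4
     *       .build();                        // throws if a required field is missing
     *
     * ColumnFamilySchema above follows the same pattern, with its repeated fields populated through
     * addAttributes(...) and addConfiguration(...) on its Builder.
     */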
private RegionInfo(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private RegionInfo() { regionId_ = 0L; startKey_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; endKey_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; offline_ = false; split_ = false; replicaId_ = 0; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private RegionInfo( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { bitField0_ |= 0x00000001; regionId_ = input.readUInt64(); break; } case 18: { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null; if (((bitField0_ & 0x00000002) == 0x00000002)) { subBuilder = tableName_.toBuilder(); } tableName_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(tableName_); tableName_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } case 26: { bitField0_ |= 0x00000004; startKey_ = input.readBytes(); break; } case 34: { bitField0_ |= 0x00000008; endKey_ = input.readBytes(); break; } case 40: { bitField0_ |= 0x00000010; offline_ = input.readBool(); break; } case 48: { bitField0_ |= 0x00000020; split_ = input.readBool(); break; } case 56: { bitField0_ |= 0x00000040; replicaId_ = input.readInt32(); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionInfo_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder.class); } private int bitField0_; public static final int REGION_ID_FIELD_NUMBER = 1; private long regionId_; /** * <code>required uint64 region_id = 1;</code> */ public boolean hasRegionId() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required uint64 region_id = 1;</code> */ public long getRegionId() { 
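      // Plain accessor: returns the stored value directly. When the field was never set it holds the
      // proto2 default (0L), so callers generally consult hasRegionId() before trusting the value.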
return regionId_; } public static final int TABLE_NAME_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_; /** * <code>required .hbase.pb.TableName table_name = 2;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required .hbase.pb.TableName table_name = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } /** * <code>required .hbase.pb.TableName table_name = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } public static final int START_KEY_FIELD_NUMBER = 3; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString startKey_; /** * <code>optional bytes start_key = 3;</code> */ public boolean hasStartKey() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional bytes start_key = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getStartKey() { return startKey_; } public static final int END_KEY_FIELD_NUMBER = 4; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString endKey_; /** * <code>optional bytes end_key = 4;</code> */ public boolean hasEndKey() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional bytes end_key = 4;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getEndKey() { return endKey_; } public static final int OFFLINE_FIELD_NUMBER = 5; private boolean offline_; /** * <code>optional bool offline = 5;</code> */ public boolean hasOffline() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>optional bool offline = 5;</code> */ public boolean getOffline() { return offline_; } public static final int SPLIT_FIELD_NUMBER = 6; private boolean split_; /** * <code>optional bool split = 6;</code> */ public boolean hasSplit() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <code>optional bool split = 6;</code> */ public boolean getSplit() { return split_; } public static final int REPLICA_ID_FIELD_NUMBER = 7; private int replicaId_; /** * <code>optional int32 replica_id = 7 [default = 0];</code> */ public boolean hasReplicaId() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** * <code>optional int32 replica_id = 7 [default = 0];</code> */ public int getReplicaId() { return replicaId_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasRegionId()) { memoizedIsInitialized = 0; return false; } if (!hasTableName()) { memoizedIsInitialized = 0; return false; } if (!getTableName().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt64(1, regionId_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeMessage(2, getTableName()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBytes(3, startKey_); } if 
(((bitField0_ & 0x00000008) == 0x00000008)) { output.writeBytes(4, endKey_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeBool(5, offline_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { output.writeBool(6, split_); } if (((bitField0_ & 0x00000040) == 0x00000040)) { output.writeInt32(7, replicaId_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt64Size(1, regionId_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(2, getTableName()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(3, startKey_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(4, endKey_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(5, offline_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(6, split_); } if (((bitField0_ & 0x00000040) == 0x00000040)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeInt32Size(7, replicaId_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo other = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo) obj; boolean result = true; result = result && (hasRegionId() == other.hasRegionId()); if (hasRegionId()) { result = result && (getRegionId() == other.getRegionId()); } result = result && (hasTableName() == other.hasTableName()); if (hasTableName()) { result = result && getTableName() .equals(other.getTableName()); } result = result && (hasStartKey() == other.hasStartKey()); if (hasStartKey()) { result = result && getStartKey() .equals(other.getStartKey()); } result = result && (hasEndKey() == other.hasEndKey()); if (hasEndKey()) { result = result && getEndKey() .equals(other.getEndKey()); } result = result && (hasOffline() == other.hasOffline()); if (hasOffline()) { result = result && (getOffline() == other.getOffline()); } result = result && (hasSplit() == other.hasSplit()); if (hasSplit()) { result = result && (getSplit() == other.getSplit()); } result = result && (hasReplicaId() == other.hasReplicaId()); if (hasReplicaId()) { result = result && (getReplicaId() == other.getReplicaId()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasRegionId()) { hash = (37 * hash) + REGION_ID_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( getRegionId()); } if (hasTableName()) { hash = 
(37 * hash) + TABLE_NAME_FIELD_NUMBER; hash = (53 * hash) + getTableName().hashCode(); } if (hasStartKey()) { hash = (37 * hash) + START_KEY_FIELD_NUMBER; hash = (53 * hash) + getStartKey().hashCode(); } if (hasEndKey()) { hash = (37 * hash) + END_KEY_FIELD_NUMBER; hash = (53 * hash) + getEndKey().hashCode(); } if (hasOffline()) { hash = (37 * hash) + OFFLINE_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getOffline()); } if (hasSplit()) { hash = (37 * hash) + SPLIT_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getSplit()); } if (hasReplicaId()) { hash = (37 * hash) + REPLICA_ID_FIELD_NUMBER; hash = (53 * hash) + getReplicaId(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo 
parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> ** * Protocol buffer version of HRegionInfo. * </pre> * * Protobuf type {@code hbase.pb.RegionInfo} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.RegionInfo) org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionInfo_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getTableNameFieldBuilder(); } } public Builder clear() { super.clear(); regionId_ = 0L; bitField0_ = (bitField0_ & ~0x00000001); if (tableNameBuilder_ == null) { tableName_ = null; } else { tableNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); startKey_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000004); endKey_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000008); offline_ = false; bitField0_ = (bitField0_ & ~0x00000010); split_ = false; bitField0_ = (bitField0_ & ~0x00000020); replicaId_ = 0; bitField0_ = (bitField0_ & ~0x00000040); return 
this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionInfo_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo build() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo result = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.regionId_ = regionId_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } if (tableNameBuilder_ == null) { result.tableName_ = tableName_; } else { result.tableName_ = tableNameBuilder_.build(); } if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.startKey_ = startKey_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; } result.endKey_ = endKey_; if (((from_bitField0_ & 0x00000010) == 0x00000010)) { to_bitField0_ |= 0x00000010; } result.offline_ = offline_; if (((from_bitField0_ & 0x00000020) == 0x00000020)) { to_bitField0_ |= 0x00000020; } result.split_ = split_; if (((from_bitField0_ & 0x00000040) == 0x00000040)) { to_bitField0_ |= 0x00000040; } result.replicaId_ = replicaId_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()) return this; if (other.hasRegionId()) { 
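        // Descriptive note: mergeFrom copies only the fields whose presence bit is set on `other`.
        // Scalar and bytes fields simply overwrite the local value, while the table_name sub-message
        // is combined field-by-field through mergeTableName(...) further below.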
setRegionId(other.getRegionId()); } if (other.hasTableName()) { mergeTableName(other.getTableName()); } if (other.hasStartKey()) { setStartKey(other.getStartKey()); } if (other.hasEndKey()) { setEndKey(other.getEndKey()); } if (other.hasOffline()) { setOffline(other.getOffline()); } if (other.hasSplit()) { setSplit(other.getSplit()); } if (other.hasReplicaId()) { setReplicaId(other.getReplicaId()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasRegionId()) { return false; } if (!hasTableName()) { return false; } if (!getTableName().isInitialized()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private long regionId_ ; /** * <code>required uint64 region_id = 1;</code> */ public boolean hasRegionId() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required uint64 region_id = 1;</code> */ public long getRegionId() { return regionId_; } /** * <code>required uint64 region_id = 1;</code> */ public Builder setRegionId(long value) { bitField0_ |= 0x00000001; regionId_ = value; onChanged(); return this; } /** * <code>required uint64 region_id = 1;</code> */ public Builder clearRegionId() { bitField0_ = (bitField0_ & ~0x00000001); regionId_ = 0L; onChanged(); return this; } private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * <code>required .hbase.pb.TableName table_name = 2;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required .hbase.pb.TableName table_name = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { if (tableNameBuilder_ == null) { return tableName_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } else { return tableNameBuilder_.getMessage(); } } /** * <code>required .hbase.pb.TableName table_name = 2;</code> */ public Builder setTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (value == null) { throw new NullPointerException(); } tableName_ = value; onChanged(); } else { tableNameBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } /** * <code>required .hbase.pb.TableName table_name = 2;</code> */ public Builder setTableName( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) { if (tableNameBuilder_ == null) { tableName_ = builderForValue.build(); onChanged(); } else { tableNameBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } /** * <code>required .hbase.pb.TableName table_name = 2;</code> */ public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && tableName_ != null && tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); } else { tableName_ = value; } onChanged(); } else { tableNameBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } /** * <code>required .hbase.pb.TableName table_name = 2;</code> */ public Builder clearTableName() { if (tableNameBuilder_ == null) { tableName_ = null; onChanged(); } else { tableNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } /** * <code>required .hbase.pb.TableName table_name = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() { bitField0_ |= 0x00000002; onChanged(); return getTableNameFieldBuilder().getBuilder(); } /** * <code>required .hbase.pb.TableName table_name = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { if (tableNameBuilder_ != null) { return tableNameBuilder_.getMessageOrBuilder(); } else { return tableName_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } } /** * <code>required .hbase.pb.TableName table_name = 2;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { tableNameBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>( getTableName(), getParentForChildren(), isClean()); tableName_ = null; } return tableNameBuilder_; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString startKey_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>optional bytes start_key = 3;</code> */ public boolean hasStartKey() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional bytes start_key = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getStartKey() { return startKey_; } /** * <code>optional bytes start_key = 3;</code> */ public Builder setStartKey(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; startKey_ = value; onChanged(); return this; } /** * <code>optional bytes start_key = 3;</code> */ public Builder clearStartKey() { bitField0_ = (bitField0_ & ~0x00000004); startKey_ = getDefaultInstance().getStartKey(); onChanged(); return this; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString endKey_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>optional bytes end_key = 4;</code> */ public boolean hasEndKey() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional bytes end_key = 4;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getEndKey() { return endKey_; } /** * <code>optional bytes end_key = 4;</code> */ public Builder setEndKey(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000008; endKey_ = value; onChanged(); return this; } /** * <code>optional bytes end_key = 4;</code> */ public Builder clearEndKey() { bitField0_ = (bitField0_ & ~0x00000008); endKey_ = getDefaultInstance().getEndKey(); onChanged(); return this; } private boolean offline_ ; /** * <code>optional bool offline = 5;</code> */ public boolean hasOffline() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>optional bool offline = 5;</code> */ public boolean getOffline() { return offline_; } /** * <code>optional bool offline = 5;</code> */ public Builder setOffline(boolean value) { bitField0_ |= 0x00000010; offline_ = value; onChanged(); return this; } /** * <code>optional bool offline = 5;</code> */ public Builder clearOffline() { bitField0_ = (bitField0_ & ~0x00000010); offline_ = false; onChanged(); return this; } private boolean split_ ; /** * <code>optional bool split = 6;</code> */ public boolean hasSplit() { return ((bitField0_ & 0x00000020) == 0x00000020); 
} /** * <code>optional bool split = 6;</code> */ public boolean getSplit() { return split_; } /** * <code>optional bool split = 6;</code> */ public Builder setSplit(boolean value) { bitField0_ |= 0x00000020; split_ = value; onChanged(); return this; } /** * <code>optional bool split = 6;</code> */ public Builder clearSplit() { bitField0_ = (bitField0_ & ~0x00000020); split_ = false; onChanged(); return this; } private int replicaId_ ; /** * <code>optional int32 replica_id = 7 [default = 0];</code> */ public boolean hasReplicaId() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** * <code>optional int32 replica_id = 7 [default = 0];</code> */ public int getReplicaId() { return replicaId_; } /** * <code>optional int32 replica_id = 7 [default = 0];</code> */ public Builder setReplicaId(int value) { bitField0_ |= 0x00000040; replicaId_ = value; onChanged(); return this; } /** * <code>optional int32 replica_id = 7 [default = 0];</code> */ public Builder clearReplicaId() { bitField0_ = (bitField0_ & ~0x00000040); replicaId_ = 0; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.RegionInfo) } // @@protoc_insertion_point(class_scope:hbase.pb.RegionInfo) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<RegionInfo> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<RegionInfo>() { public RegionInfo parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new RegionInfo(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<RegionInfo> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<RegionInfo> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface FavoredNodesOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.FavoredNodes) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName> getFavoredNodeList(); /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getFavoredNode(int index); /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ int getFavoredNodeCount(); /** * <code>repeated .hbase.pb.ServerName favored_node = 
1;</code> */ java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getFavoredNodeOrBuilderList(); /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getFavoredNodeOrBuilder( int index); } /** * <pre> ** * Protocol buffer for favored nodes * </pre> * * Protobuf type {@code hbase.pb.FavoredNodes} */ public static final class FavoredNodes extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.FavoredNodes) FavoredNodesOrBuilder { // Use FavoredNodes.newBuilder() to construct. private FavoredNodes(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private FavoredNodes() { favoredNode_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private FavoredNodes( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { favoredNode_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName>(); mutable_bitField0_ |= 0x00000001; } favoredNode_.add( input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry)); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { favoredNode_ = java.util.Collections.unmodifiableList(favoredNode_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_FavoredNodes_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_FavoredNodes_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes.Builder.class); } public static final int FAVORED_NODE_FIELD_NUMBER = 1; private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName> favoredNode_; /** * <code>repeated 
.hbase.pb.ServerName favored_node = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName> getFavoredNodeList() { return favoredNode_; } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getFavoredNodeOrBuilderList() { return favoredNode_; } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public int getFavoredNodeCount() { return favoredNode_.size(); } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getFavoredNode(int index) { return favoredNode_.get(index); } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getFavoredNodeOrBuilder( int index) { return favoredNode_.get(index); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getFavoredNodeCount(); i++) { if (!getFavoredNode(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < favoredNode_.size(); i++) { output.writeMessage(1, favoredNode_.get(i)); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < favoredNode_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(1, favoredNode_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes other = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes) obj; boolean result = true; result = result && getFavoredNodeList() .equals(other.getFavoredNodeList()); result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getFavoredNodeCount() > 0) { hash = (37 * hash) + FAVORED_NODE_FIELD_NUMBER; hash = (53 * hash) + getFavoredNodeList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> ** * Protocol buffer for favored nodes * </pre> * * Protobuf type {@code hbase.pb.FavoredNodes} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.FavoredNodes) org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodesOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_FavoredNodes_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_FavoredNodes_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getFavoredNodeFieldBuilder(); } } public Builder clear() { super.clear(); if (favoredNodeBuilder_ == null) { favoredNode_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { favoredNodeBuilder_.clear(); } return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_FavoredNodes_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes build() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes result = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes(this); int from_bitField0_ = bitField0_; if (favoredNodeBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001)) { favoredNode_ = java.util.Collections.unmodifiableList(favoredNode_); bitField0_ = (bitField0_ & ~0x00000001); } result.favoredNode_ = favoredNode_; } else { result.favoredNode_ = favoredNodeBuilder_.build(); } onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes.getDefaultInstance()) return this; if (favoredNodeBuilder_ == null) { if (!other.favoredNode_.isEmpty()) { if (favoredNode_.isEmpty()) { favoredNode_ = other.favoredNode_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureFavoredNodeIsMutable(); favoredNode_.addAll(other.favoredNode_); } onChanged(); } } else { if (!other.favoredNode_.isEmpty()) { if (favoredNodeBuilder_.isEmpty()) { favoredNodeBuilder_.dispose(); favoredNodeBuilder_ = null; favoredNode_ = other.favoredNode_; bitField0_ = (bitField0_ & ~0x00000001); favoredNodeBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getFavoredNodeFieldBuilder() : null; } else { favoredNodeBuilder_.addAllMessages(other.favoredNode_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getFavoredNodeCount(); i++) { if (!getFavoredNode(i).isInitialized()) { return false; } } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName> favoredNode_ = java.util.Collections.emptyList(); private void ensureFavoredNodeIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { favoredNode_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName>(favoredNode_); bitField0_ |= 0x00000001; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> favoredNodeBuilder_; /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName> getFavoredNodeList() { if (favoredNodeBuilder_ == null) { return java.util.Collections.unmodifiableList(favoredNode_); } else { return favoredNodeBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public int getFavoredNodeCount() { if (favoredNodeBuilder_ == null) { return favoredNode_.size(); } else { return favoredNodeBuilder_.getCount(); } } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getFavoredNode(int index) { if (favoredNodeBuilder_ == null) { return favoredNode_.get(index); } else { return favoredNodeBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public Builder setFavoredNode( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) { if (favoredNodeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFavoredNodeIsMutable(); favoredNode_.set(index, value); onChanged(); } else { favoredNodeBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public Builder setFavoredNode( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { if (favoredNodeBuilder_ == null) { ensureFavoredNodeIsMutable(); favoredNode_.set(index, builderForValue.build()); onChanged(); } else { favoredNodeBuilder_.setMessage(index, builderForValue.build()); } 
return this; } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public Builder addFavoredNode(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) { if (favoredNodeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFavoredNodeIsMutable(); favoredNode_.add(value); onChanged(); } else { favoredNodeBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public Builder addFavoredNode( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) { if (favoredNodeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFavoredNodeIsMutable(); favoredNode_.add(index, value); onChanged(); } else { favoredNodeBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public Builder addFavoredNode( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { if (favoredNodeBuilder_ == null) { ensureFavoredNodeIsMutable(); favoredNode_.add(builderForValue.build()); onChanged(); } else { favoredNodeBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public Builder addFavoredNode( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { if (favoredNodeBuilder_ == null) { ensureFavoredNodeIsMutable(); favoredNode_.add(index, builderForValue.build()); onChanged(); } else { favoredNodeBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public Builder addAllFavoredNode( java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName> values) { if (favoredNodeBuilder_ == null) { ensureFavoredNodeIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, favoredNode_); onChanged(); } else { favoredNodeBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public Builder clearFavoredNode() { if (favoredNodeBuilder_ == null) { favoredNode_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { favoredNodeBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public Builder removeFavoredNode(int index) { if (favoredNodeBuilder_ == null) { ensureFavoredNodeIsMutable(); favoredNode_.remove(index); onChanged(); } else { favoredNodeBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder getFavoredNodeBuilder( int index) { return getFavoredNodeFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getFavoredNodeOrBuilder( int index) { if (favoredNodeBuilder_ == null) { return favoredNode_.get(index); } else { return favoredNodeBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getFavoredNodeOrBuilderList() { if (favoredNodeBuilder_ != null) { return favoredNodeBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(favoredNode_); } } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder addFavoredNodeBuilder() { return getFavoredNodeFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()); } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder addFavoredNodeBuilder( int index) { return getFavoredNodeFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()); } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder> getFavoredNodeBuilderList() { return getFavoredNodeFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getFavoredNodeFieldBuilder() { if (favoredNodeBuilder_ == null) { favoredNodeBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( favoredNode_, ((bitField0_ & 0x00000001) == 0x00000001), getParentForChildren(), isClean()); favoredNode_ = null; } return favoredNodeBuilder_; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.FavoredNodes) } // @@protoc_insertion_point(class_scope:hbase.pb.FavoredNodes) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FavoredNodes> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<FavoredNodes>() { public FavoredNodes parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new FavoredNodes(input, extensionRegistry); } }; public static 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FavoredNodes> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FavoredNodes> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface RegionSpecifierOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.RegionSpecifier) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>required .hbase.pb.RegionSpecifier.RegionSpecifierType type = 1;</code> */ boolean hasType(); /** * <code>required .hbase.pb.RegionSpecifier.RegionSpecifierType type = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType getType(); /** * <code>required bytes value = 2;</code> */ boolean hasValue(); /** * <code>required bytes value = 2;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getValue(); } /** * <pre> ** * Container protocol buffer to specify a region. * You can specify region by region name, or the hash * of the region name, which is known as encoded * region name. * </pre> * * Protobuf type {@code hbase.pb.RegionSpecifier} */ public static final class RegionSpecifier extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.RegionSpecifier) RegionSpecifierOrBuilder { // Use RegionSpecifier.newBuilder() to construct. private RegionSpecifier(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private RegionSpecifier() { type_ = 1; value_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private RegionSpecifier( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { int rawValue = input.readEnum(); org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType value = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(1, rawValue); } else { bitField0_ |= 0x00000001; type_ = rawValue; } break; } case 18: { bitField0_ |= 0x00000002; value_ = input.readBytes(); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } 
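  // -------------------------------------------------------------------------
  // Illustrative usage sketch (editor's note, kept as a comment because the
  // surrounding class is protoc-generated and must not be edited by hand).
  // It shows how a caller might construct the RegionSpecifier message described
  // in the Javadoc above, identifying a region by its encoded name rather than
  // its full region name. The literal "1588230740" is a hypothetical encoded
  // region name used purely for illustration; it is not taken from this file.
  //
  //   org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier spec =
  //       org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder()
  //           // choose ENCODED_REGION_NAME (the hash form) instead of REGION_NAME
  //           .setType(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos
  //               .RegionSpecifier.RegionSpecifierType.ENCODED_REGION_NAME)
  //           // the specifier payload is raw bytes; here a UTF-8 string is used
  //           .setValue(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
  //               .copyFromUtf8("1588230740"))
  //           .build();
  //
  // Because both fields are declared "required" in HBase.proto, build() verifies
  // isInitialized() and throws an UninitializedMessageException if either the
  // type or the value has not been set; buildPartial() skips that check.
  // -------------------------------------------------------------------------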
public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionSpecifier_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionSpecifier_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder.class); } /** * Protobuf enum {@code hbase.pb.RegionSpecifier.RegionSpecifierType} */ public enum RegionSpecifierType implements org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolMessageEnum { /** * <pre> * <tablename>,<startkey>,<regionId>.<encodedName> * </pre> * * <code>REGION_NAME = 1;</code> */ REGION_NAME(1), /** * <pre> * hash of <tablename>,<startkey>,<regionId> * </pre> * * <code>ENCODED_REGION_NAME = 2;</code> */ ENCODED_REGION_NAME(2), ; /** * <pre> * <tablename>,<startkey>,<regionId>.<encodedName> * </pre> * * <code>REGION_NAME = 1;</code> */ public static final int REGION_NAME_VALUE = 1; /** * <pre> * hash of <tablename>,<startkey>,<regionId> * </pre> * * <code>ENCODED_REGION_NAME = 2;</code> */ public static final int ENCODED_REGION_NAME_VALUE = 2; public final int getNumber() { return value; } /** * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static RegionSpecifierType valueOf(int value) { return forNumber(value); } public static RegionSpecifierType forNumber(int value) { switch (value) { case 1: return REGION_NAME; case 2: return ENCODED_REGION_NAME; default: return null; } } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<RegionSpecifierType> internalGetValueMap() { return internalValueMap; } private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap< RegionSpecifierType> internalValueMap = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<RegionSpecifierType>() { public RegionSpecifierType findValueByNumber(int number) { return RegionSpecifierType.forNumber(number); } }; public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); } public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDescriptor().getEnumTypes().get(0); } private static final RegionSpecifierType[] VALUES = values(); public static RegionSpecifierType valueOf( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } return VALUES[desc.getIndex()]; } private final int value; private RegionSpecifierType(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:hbase.pb.RegionSpecifier.RegionSpecifierType) } private int bitField0_; public static final int TYPE_FIELD_NUMBER = 1; private int type_; /** * 
<code>required .hbase.pb.RegionSpecifier.RegionSpecifierType type = 1;</code> */ public boolean hasType() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.RegionSpecifier.RegionSpecifierType type = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType getType() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType result = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.valueOf(type_); return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME : result; } public static final int VALUE_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value_; /** * <code>required bytes value = 2;</code> */ public boolean hasValue() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required bytes value = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getValue() { return value_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasType()) { memoizedIsInitialized = 0; return false; } if (!hasValue()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeEnum(1, type_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, value_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeEnumSize(1, type_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(2, value_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier other = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier) obj; boolean result = true; result = result && (hasType() == other.hasType()); if (hasType()) { result = result && type_ == other.type_; } result = result && (hasValue() == other.hasValue()); if (hasValue()) { result = result && getValue() .equals(other.getValue()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasType()) { hash = (37 * hash) + TYPE_FIELD_NUMBER; hash = (53 * hash) + type_; } if (hasValue()) { hash = (37 * hash) + VALUE_FIELD_NUMBER; hash = (53 * hash) + getValue().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public 
static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static 
Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> ** * Container protocol buffer to specify a region. * You can specify region by region name, or the hash * of the region name, which is known as encoded * region name. * </pre> * * Protobuf type {@code hbase.pb.RegionSpecifier} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.RegionSpecifier) org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionSpecifier_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionSpecifier_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); type_ = 1; bitField0_ = (bitField0_ & ~0x00000001); value_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionSpecifier_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier build() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier result = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if 
(((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.type_ = type_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.value_ = value_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) return this; if (other.hasType()) { setType(other.getType()); } if (other.hasValue()) { setValue(other.getValue()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasType()) { return false; } if (!hasValue()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private int type_ = 1; /** * <code>required .hbase.pb.RegionSpecifier.RegionSpecifierType type = 1;</code> */ public boolean hasType() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.RegionSpecifier.RegionSpecifierType type = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType getType() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType result = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.valueOf(type_); return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME : result; } /** * <code>required .hbase.pb.RegionSpecifier.RegionSpecifierType type = 1;</code> */ public Builder setType(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; type_ = value.getNumber(); onChanged(); return this; } /** * <code>required .hbase.pb.RegionSpecifier.RegionSpecifierType type = 1;</code> */ public Builder clearType() { bitField0_ = (bitField0_ & ~0x00000001); type_ = 1; onChanged(); return this; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes value = 2;</code> */ public boolean hasValue() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required bytes value = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getValue() { return value_; } /** * <code>required bytes value = 2;</code> */ public Builder setValue(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; value_ = value; onChanged(); return this; } /** * <code>required bytes value = 2;</code> */ public Builder clearValue() { bitField0_ = (bitField0_ & ~0x00000002); value_ = getDefaultInstance().getValue(); onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.RegionSpecifier) } // @@protoc_insertion_point(class_scope:hbase.pb.RegionSpecifier) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<RegionSpecifier> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<RegionSpecifier>() { public RegionSpecifier parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new RegionSpecifier(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<RegionSpecifier> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<RegionSpecifier> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface TimeRangeOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.TimeRange) 
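// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the protoc output): building a
// RegionSpecifier that identifies a region by its full region name, using only
// the generated accessors shown above. The value bytes are a hypothetical
// placeholder, not a real region name.
//
//   HBaseProtos.RegionSpecifier spec = HBaseProtos.RegionSpecifier.newBuilder()
//       .setType(HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME)
//       .setValue(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
//           .copyFromUtf8("example-region-name"))
//       .build();
//
// Both type and value are declared required, so build() throws an
// UninitializedMessageException if either setter is skipped; buildPartial()
// omits that check.
// ---------------------------------------------------------------------------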
org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>optional uint64 from = 1;</code> */ boolean hasFrom(); /** * <code>optional uint64 from = 1;</code> */ long getFrom(); /** * <code>optional uint64 to = 2;</code> */ boolean hasTo(); /** * <code>optional uint64 to = 2;</code> */ long getTo(); } /** * <pre> ** * A range of time. Both from and to are Java time * stamp in milliseconds. If you don't specify a time * range, it means all time. By default, if not * specified, from = 0, and to = Long.MAX_VALUE * </pre> * * Protobuf type {@code hbase.pb.TimeRange} */ public static final class TimeRange extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.TimeRange) TimeRangeOrBuilder { // Use TimeRange.newBuilder() to construct. private TimeRange(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private TimeRange() { from_ = 0L; to_ = 0L; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private TimeRange( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { bitField0_ |= 0x00000001; from_ = input.readUInt64(); break; } case 16: { bitField0_ |= 0x00000002; to_ = input.readUInt64(); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TimeRange_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TimeRange_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder.class); } private int bitField0_; public static final int FROM_FIELD_NUMBER = 1; private long from_; /** * <code>optional uint64 from = 1;</code> */ public boolean hasFrom() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional uint64 from = 1;</code> */ public long getFrom() { return from_; } public static final int TO_FIELD_NUMBER = 2; private long to_; /** * <code>optional uint64 to = 2;</code> */ public boolean hasTo() { return 
((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional uint64 to = 2;</code> */ public long getTo() { return to_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt64(1, from_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeUInt64(2, to_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt64Size(1, from_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt64Size(2, to_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange other = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange) obj; boolean result = true; result = result && (hasFrom() == other.hasFrom()); if (hasFrom()) { result = result && (getFrom() == other.getFrom()); } result = result && (hasTo() == other.hasTo()); if (hasTo()) { result = result && (getTo() == other.getTo()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasFrom()) { hash = (37 * hash) + FROM_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( getFrom()); } if (hasTo()) { hash = (37 * hash) + TO_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( getTo()); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange parseFrom( byte[] data, 
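// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the protoc output; relies on the generated
// accessors above plus the standard toByteArray() inherited from the shaded
// protobuf runtime): building a one-hour TimeRange and round-tripping it
// through the byte[] parseFrom overload. The timestamps are hypothetical
// epoch-millisecond values.
//
//   HBaseProtos.TimeRange range = HBaseProtos.TimeRange.newBuilder()
//       .setFrom(1500000000000L)
//       .setTo(1500003600000L)
//       .build();
//   byte[] wire = range.toByteArray();
//   HBaseProtos.TimeRange copy = HBaseProtos.TimeRange.parseFrom(wire);
//   // copy.getFrom() == 1500000000000L and copy.getTo() == 1500003600000L
//
// Since both fields are optional, a TimeRange with neither field set means
// "all time" (from = 0, to = Long.MAX_VALUE), as the message comment above
// states.
// ---------------------------------------------------------------------------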
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> ** * A range of time. Both from and to are Java time * stamp in milliseconds. If you don't specify a time * range, it means all time. 
By default, if not * specified, from = 0, and to = Long.MAX_VALUE * </pre> * * Protobuf type {@code hbase.pb.TimeRange} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.TimeRange) org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TimeRange_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TimeRange_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); from_ = 0L; bitField0_ = (bitField0_ & ~0x00000001); to_ = 0L; bitField0_ = (bitField0_ & ~0x00000002); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TimeRange_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange build() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange result = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.from_ = from_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.to_ = to_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor 
oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) return this; if (other.hasFrom()) { setFrom(other.getFrom()); } if (other.hasTo()) { setTo(other.getTo()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private long from_ ; /** * <code>optional uint64 from = 1;</code> */ public boolean hasFrom() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional uint64 from = 1;</code> */ public long getFrom() { return from_; } /** * <code>optional uint64 from = 1;</code> */ public Builder setFrom(long value) { bitField0_ |= 0x00000001; from_ = value; onChanged(); return this; } /** * <code>optional uint64 from = 1;</code> */ public Builder clearFrom() { bitField0_ = (bitField0_ & ~0x00000001); from_ = 0L; onChanged(); return this; } private long to_ ; /** * <code>optional uint64 to = 2;</code> */ public boolean hasTo() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional uint64 to = 2;</code> */ public long getTo() { return to_; } /** * <code>optional uint64 to = 2;</code> */ public Builder setTo(long value) { bitField0_ |= 0x00000002; to_ = value; onChanged(); return this; } /** * <code>optional uint64 to = 2;</code> */ public Builder clearTo() { bitField0_ = (bitField0_ & ~0x00000002); to_ = 0L; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.TimeRange) } // @@protoc_insertion_point(class_scope:hbase.pb.TimeRange) private static 
final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<TimeRange> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<TimeRange>() { public TimeRange parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new TimeRange(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<TimeRange> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<TimeRange> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ColumnFamilyTimeRangeOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.ColumnFamilyTimeRange) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>required bytes column_family = 1;</code> */ boolean hasColumnFamily(); /** * <code>required bytes column_family = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getColumnFamily(); /** * <code>required .hbase.pb.TimeRange time_range = 2;</code> */ boolean hasTimeRange(); /** * <code>required .hbase.pb.TimeRange time_range = 2;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange getTimeRange(); /** * <code>required .hbase.pb.TimeRange time_range = 2;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder(); } /** * <pre> * ColumnFamily Specific TimeRange * </pre> * * Protobuf type {@code hbase.pb.ColumnFamilyTimeRange} */ public static final class ColumnFamilyTimeRange extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.ColumnFamilyTimeRange) ColumnFamilyTimeRangeOrBuilder { // Use ColumnFamilyTimeRange.newBuilder() to construct. 
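// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the protoc output): limiting a single
// column family to a time range. The family name "cf" and the timestamps are
// hypothetical placeholders.
//
//   HBaseProtos.ColumnFamilyTimeRange cfRange =
//       HBaseProtos.ColumnFamilyTimeRange.newBuilder()
//           .setColumnFamily(org.apache.hadoop.hbase.shaded.com.google.protobuf
//               .ByteString.copyFromUtf8("cf"))
//           .setTimeRange(HBaseProtos.TimeRange.newBuilder()
//               .setFrom(0L)
//               .setTo(1500000000000L))
//           .build();
//
// column_family and time_range are both required, so isInitialized() only
// returns true once both have been set. The setTimeRange(TimeRange.Builder)
// overload calls build() on the nested builder, as the builder code below
// shows.
// ---------------------------------------------------------------------------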
private ColumnFamilyTimeRange(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ColumnFamilyTimeRange() { columnFamily_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ColumnFamilyTimeRange( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; columnFamily_ = input.readBytes(); break; } case 18: { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = null; if (((bitField0_ & 0x00000002) == 0x00000002)) { subBuilder = timeRange_.toBuilder(); } timeRange_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(timeRange_); timeRange_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ColumnFamilyTimeRange_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ColumnFamilyTimeRange_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder.class); } private int bitField0_; public static final int COLUMN_FAMILY_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString columnFamily_; /** * <code>required bytes column_family = 1;</code> */ public boolean hasColumnFamily() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes column_family = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getColumnFamily() { return columnFamily_; } public static final int TIME_RANGE_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange timeRange_; /** * <code>required .hbase.pb.TimeRange time_range = 2;</code> */ public boolean hasTimeRange() { return ((bitField0_ & 0x00000002) == 
0x00000002); } /** * <code>required .hbase.pb.TimeRange time_range = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { return timeRange_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance() : timeRange_; } /** * <code>required .hbase.pb.TimeRange time_range = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { return timeRange_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance() : timeRange_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasColumnFamily()) { memoizedIsInitialized = 0; return false; } if (!hasTimeRange()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, columnFamily_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeMessage(2, getTimeRange()); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(1, columnFamily_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(2, getTimeRange()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange other = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange) obj; boolean result = true; result = result && (hasColumnFamily() == other.hasColumnFamily()); if (hasColumnFamily()) { result = result && getColumnFamily() .equals(other.getColumnFamily()); } result = result && (hasTimeRange() == other.hasTimeRange()); if (hasTimeRange()) { result = result && getTimeRange() .equals(other.getTimeRange()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasColumnFamily()) { hash = (37 * hash) + COLUMN_FAMILY_FIELD_NUMBER; hash = (53 * hash) + getColumnFamily().hashCode(); } if (hasTimeRange()) { hash = (37 * hash) + TIME_RANGE_FIELD_NUMBER; hash = (53 * hash) + getTimeRange().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public 
static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * ColumnFamily Specific TimeRange * </pre> * * Protobuf type {@code hbase.pb.ColumnFamilyTimeRange} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.ColumnFamilyTimeRange) org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ColumnFamilyTimeRange_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ColumnFamilyTimeRange_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getTimeRangeFieldBuilder(); } } public Builder clear() { super.clear(); columnFamily_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); if (timeRangeBuilder_ == null) { timeRange_ = null; } else { timeRangeBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ColumnFamilyTimeRange_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange build() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange result = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.columnFamily_ = columnFamily_; if (((from_bitField0_ & 0x00000002) == 
0x00000002)) { to_bitField0_ |= 0x00000002; } if (timeRangeBuilder_ == null) { result.timeRange_ = timeRange_; } else { result.timeRange_ = timeRangeBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.getDefaultInstance()) return this; if (other.hasColumnFamily()) { setColumnFamily(other.getColumnFamily()); } if (other.hasTimeRange()) { mergeTimeRange(other.getTimeRange()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasColumnFamily()) { return false; } if (!hasTimeRange()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString columnFamily_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes column_family = 1;</code> */ public boolean hasColumnFamily() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes column_family = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getColumnFamily() { return columnFamily_; } /** * <code>required bytes column_family = 1;</code> */ public Builder setColumnFamily(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == 
null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; columnFamily_ = value; onChanged(); return this; } /** * <code>required bytes column_family = 1;</code> */ public Builder clearColumnFamily() { bitField0_ = (bitField0_ & ~0x00000001); columnFamily_ = getDefaultInstance().getColumnFamily(); onChanged(); return this; } private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange timeRange_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_; /** * <code>required .hbase.pb.TimeRange time_range = 2;</code> */ public boolean hasTimeRange() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required .hbase.pb.TimeRange time_range = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { if (timeRangeBuilder_ == null) { return timeRange_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance() : timeRange_; } else { return timeRangeBuilder_.getMessage(); } } /** * <code>required .hbase.pb.TimeRange time_range = 2;</code> */ public Builder setTimeRange(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange value) { if (timeRangeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } timeRange_ = value; onChanged(); } else { timeRangeBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } /** * <code>required .hbase.pb.TimeRange time_range = 2;</code> */ public Builder setTimeRange( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) { if (timeRangeBuilder_ == null) { timeRange_ = builderForValue.build(); onChanged(); } else { timeRangeBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } /** * <code>required .hbase.pb.TimeRange time_range = 2;</code> */ public Builder mergeTimeRange(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange value) { if (timeRangeBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && timeRange_ != null && timeRange_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) { timeRange_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial(); } else { timeRange_ = value; } onChanged(); } else { timeRangeBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } /** * <code>required .hbase.pb.TimeRange time_range = 2;</code> */ public Builder clearTimeRange() { if (timeRangeBuilder_ == null) { timeRange_ = null; onChanged(); } else { timeRangeBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } /** * <code>required .hbase.pb.TimeRange time_range = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() { bitField0_ |= 0x00000002; onChanged(); return getTimeRangeFieldBuilder().getBuilder(); } /** * <code>required .hbase.pb.TimeRange time_range = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { if (timeRangeBuilder_ != null) { return timeRangeBuilder_.getMessageOrBuilder(); } else { return 
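// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the protoc output): the nested time_range
// field can also be edited in place through its child builder instead of
// being replaced wholesale. Values are hypothetical.
//
//   HBaseProtos.ColumnFamilyTimeRange.Builder b =
//       HBaseProtos.ColumnFamilyTimeRange.newBuilder()
//           .setColumnFamily(org.apache.hadoop.hbase.shaded.com.google.protobuf
//               .ByteString.copyFromUtf8("cf"));
//   b.getTimeRangeBuilder().setFrom(10L).setTo(20L); // also marks time_range as set
//   HBaseProtos.ColumnFamilyTimeRange msg = b.build();
//
// Note the difference between the two write paths above: setTimeRange(value)
// replaces the whole nested message, while mergeTimeRange(value) field-merges
// into an already-present TimeRange, as its generated body shows.
// ---------------------------------------------------------------------------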
timeRange_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance() : timeRange_; } } /** * <code>required .hbase.pb.TimeRange time_range = 2;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> getTimeRangeFieldBuilder() { if (timeRangeBuilder_ == null) { timeRangeBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>( getTimeRange(), getParentForChildren(), isClean()); timeRange_ = null; } return timeRangeBuilder_; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.ColumnFamilyTimeRange) } // @@protoc_insertion_point(class_scope:hbase.pb.ColumnFamilyTimeRange) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ColumnFamilyTimeRange> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<ColumnFamilyTimeRange>() { public ColumnFamilyTimeRange parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new ColumnFamilyTimeRange(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ColumnFamilyTimeRange> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ColumnFamilyTimeRange> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ServerNameOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.ServerName) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>required string host_name = 1;</code> */ boolean hasHostName(); /** * <code>required string host_name = 1;</code> */ java.lang.String getHostName(); /** * <code>required string host_name = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getHostNameBytes(); /** * <code>optional uint32 port = 2;</code> */ boolean hasPort(); /** * <code>optional uint32 port = 2;</code> */ int getPort(); /** * 
<code>optional uint64 start_code = 3;</code> */ boolean hasStartCode(); /** * <code>optional uint64 start_code = 3;</code> */ long getStartCode(); } /** * <pre> ** * Protocol buffer version of ServerName * </pre> * * Protobuf type {@code hbase.pb.ServerName} */ public static final class ServerName extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.ServerName) ServerNameOrBuilder { // Use ServerName.newBuilder() to construct. private ServerName(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ServerName() { hostName_ = ""; port_ = 0; startCode_ = 0L; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ServerName( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; hostName_ = bs; break; } case 16: { bitField0_ |= 0x00000002; port_ = input.readUInt32(); break; } case 24: { bitField0_ |= 0x00000004; startCode_ = input.readUInt64(); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ServerName_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ServerName_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder.class); } private int bitField0_; public static final int HOST_NAME_FIELD_NUMBER = 1; private volatile java.lang.Object hostName_; /** * <code>required string host_name = 1;</code> */ public boolean hasHostName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string host_name = 1;</code> */ public java.lang.String getHostName() { java.lang.Object ref = hostName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; 
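// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the protoc output; assumes the standard
// generated setters setHostName, setPort and setStartCode, which follow the
// same pattern as the builders above). Host name, port and start code are
// hypothetical values.
//
//   HBaseProtos.ServerName sn = HBaseProtos.ServerName.newBuilder()
//       .setHostName("regionserver-1.example.com")
//       .setPort(16020)
//       .setStartCode(System.currentTimeMillis())
//       .build();
//
// Only host_name is required; when port or start_code is left unset,
// hasPort()/hasStartCode() return false and getPort()/getStartCode() return 0.
// ---------------------------------------------------------------------------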
java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { hostName_ = s; } return s; } } /** * <code>required string host_name = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getHostNameBytes() { java.lang.Object ref = hostName_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); hostName_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int PORT_FIELD_NUMBER = 2; private int port_; /** * <code>optional uint32 port = 2;</code> */ public boolean hasPort() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional uint32 port = 2;</code> */ public int getPort() { return port_; } public static final int START_CODE_FIELD_NUMBER = 3; private long startCode_; /** * <code>optional uint64 start_code = 3;</code> */ public boolean hasStartCode() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional uint64 start_code = 3;</code> */ public long getStartCode() { return startCode_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasHostName()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, hostName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeUInt32(2, port_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeUInt64(3, startCode_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(1, hostName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt32Size(2, port_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt64Size(3, startCode_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName other = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName) obj; boolean result = true; result = result && (hasHostName() == other.hasHostName()); if (hasHostName()) { result = result && getHostName() .equals(other.getHostName()); } result = result && (hasPort() == other.hasPort()); if (hasPort()) { result = result && (getPort() == other.getPort()); } result = result && (hasStartCode() == other.hasStartCode()); if (hasStartCode()) { result = result && (getStartCode() == other.getStartCode()); } result = result && 
unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasHostName()) { hash = (37 * hash) + HOST_NAME_FIELD_NUMBER; hash = (53 * hash) + getHostName().hashCode(); } if (hasPort()) { hash = (37 * hash) + PORT_FIELD_NUMBER; hash = (53 * hash) + getPort(); } if (hasStartCode()) { hash = (37 * hash) + START_CODE_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( getStartCode()); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return 
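// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the protoc output; assumes the standard
// writeDelimitedTo() from the shaded protobuf runtime): the *Delimited*
// variants length-prefix each message, so several ServerName messages can be
// written to and read back from one stream.
//
//   java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
//   sn1.writeDelimitedTo(out);
//   sn2.writeDelimitedTo(out);
//   java.io.InputStream in =
//       new java.io.ByteArrayInputStream(out.toByteArray());
//   HBaseProtos.ServerName first  = HBaseProtos.ServerName.parseDelimitedFrom(in);
//   HBaseProtos.ServerName second = HBaseProtos.ServerName.parseDelimitedFrom(in);
//
// The plain parseFrom(InputStream) overload, by contrast, consumes the whole
// stream as a single message.
// ---------------------------------------------------------------------------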
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> ** * Protocol buffer version of ServerName * </pre> * * Protobuf type {@code hbase.pb.ServerName} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.ServerName) org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ServerName_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ServerName_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); hostName_ = ""; bitField0_ = (bitField0_ & ~0x00000001); port_ = 0; bitField0_ = (bitField0_ & ~0x00000002); startCode_ = 0L; bitField0_ = (bitField0_ & ~0x00000004); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ServerName_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName build() { 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName result = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.hostName_ = hostName_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.port_ = port_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.startCode_ = startCode_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) return this; if (other.hasHostName()) { bitField0_ |= 0x00000001; hostName_ = other.hostName_; onChanged(); } if (other.hasPort()) { setPort(other.getPort()); } if (other.hasStartCode()) { setStartCode(other.getStartCode()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasHostName()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int 
bitField0_; private java.lang.Object hostName_ = ""; /** * <code>required string host_name = 1;</code> */ public boolean hasHostName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string host_name = 1;</code> */ public java.lang.String getHostName() { java.lang.Object ref = hostName_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { hostName_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>required string host_name = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getHostNameBytes() { java.lang.Object ref = hostName_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); hostName_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <code>required string host_name = 1;</code> */ public Builder setHostName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; hostName_ = value; onChanged(); return this; } /** * <code>required string host_name = 1;</code> */ public Builder clearHostName() { bitField0_ = (bitField0_ & ~0x00000001); hostName_ = getDefaultInstance().getHostName(); onChanged(); return this; } /** * <code>required string host_name = 1;</code> */ public Builder setHostNameBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; hostName_ = value; onChanged(); return this; } private int port_ ; /** * <code>optional uint32 port = 2;</code> */ public boolean hasPort() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional uint32 port = 2;</code> */ public int getPort() { return port_; } /** * <code>optional uint32 port = 2;</code> */ public Builder setPort(int value) { bitField0_ |= 0x00000002; port_ = value; onChanged(); return this; } /** * <code>optional uint32 port = 2;</code> */ public Builder clearPort() { bitField0_ = (bitField0_ & ~0x00000002); port_ = 0; onChanged(); return this; } private long startCode_ ; /** * <code>optional uint64 start_code = 3;</code> */ public boolean hasStartCode() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional uint64 start_code = 3;</code> */ public long getStartCode() { return startCode_; } /** * <code>optional uint64 start_code = 3;</code> */ public Builder setStartCode(long value) { bitField0_ |= 0x00000004; startCode_ = value; onChanged(); return this; } /** * <code>optional uint64 start_code = 3;</code> */ public Builder clearStartCode() { bitField0_ = (bitField0_ & ~0x00000004); startCode_ = 0L; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.ServerName) } // @@protoc_insertion_point(class_scope:hbase.pb.ServerName) private static final 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ServerName> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<ServerName>() { public ServerName parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new ServerName(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ServerName> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ServerName> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface CoprocessorOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.Coprocessor) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>required string name = 1;</code> */ boolean hasName(); /** * <code>required string name = 1;</code> */ java.lang.String getName(); /** * <code>required string name = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getNameBytes(); } /** * Protobuf type {@code hbase.pb.Coprocessor} */ public static final class Coprocessor extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.Coprocessor) CoprocessorOrBuilder { // Use Coprocessor.newBuilder() to construct. 
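  // A minimal usage sketch (illustrative only, not part of the generated output);
  // the coprocessor class name below is a hypothetical example value.
  //
  //   HBaseProtos.Coprocessor cp = HBaseProtos.Coprocessor.newBuilder()
  //       .setName("org.example.MyObserver")   // required string name = 1
  //       .build();
  //   byte[] bytes = cp.toByteArray();
  //   HBaseProtos.Coprocessor roundTripped = HBaseProtos.Coprocessor.parseFrom(bytes);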
private Coprocessor(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private Coprocessor() { name_ = ""; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private Coprocessor( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; name_ = bs; break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_Coprocessor_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_Coprocessor_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor.Builder.class); } private int bitField0_; public static final int NAME_FIELD_NUMBER = 1; private volatile java.lang.Object name_; /** * <code>required string name = 1;</code> */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { name_ = s; } return s; } } /** * <code>required string name = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if 
(isInitialized == 0) return false; if (!hasName()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor other = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor) obj; boolean result = true; result = result && (hasName() == other.hasName()); if (hasName()) { result = result && getName() .equals(other.getName()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasName()) { hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.Coprocessor} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.Coprocessor) org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CoprocessorOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_Coprocessor_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_Coprocessor_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); name_ = ""; bitField0_ = (bitField0_ & ~0x00000001); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_Coprocessor_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor build() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor result = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.name_ = name_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor.getDefaultInstance()) return this; if (other.hasName()) { bitField0_ |= 0x00000001; name_ = other.name_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasName()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object name_ = ""; /** * <code>required string name = 1;</code> */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { name_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>required string name = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <code>required string name = 1;</code> */ public Builder setName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; name_ = value; onChanged(); return this; } /** * <code>required string name = 1;</code> */ public Builder clearName() { bitField0_ = (bitField0_ & ~0x00000001); name_ = getDefaultInstance().getName(); onChanged(); return 
this; } /** * <code>required string name = 1;</code> */ public Builder setNameBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; name_ = value; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.Coprocessor) } // @@protoc_insertion_point(class_scope:hbase.pb.Coprocessor) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Coprocessor> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<Coprocessor>() { public Coprocessor parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new Coprocessor(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Coprocessor> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Coprocessor> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface NameStringPairOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.NameStringPair) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>required string name = 1;</code> */ boolean hasName(); /** * <code>required string name = 1;</code> */ java.lang.String getName(); /** * <code>required string name = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getNameBytes(); /** * <code>required string value = 2;</code> */ boolean hasValue(); /** * <code>required string value = 2;</code> */ java.lang.String getValue(); /** * <code>required string value = 2;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getValueBytes(); } /** * Protobuf type {@code hbase.pb.NameStringPair} */ public static final class NameStringPair extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.NameStringPair) NameStringPairOrBuilder { // Use NameStringPair.newBuilder() to construct. 
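  // A minimal usage sketch (illustrative only, not part of the generated output);
  // the key and value strings are hypothetical example values.
  //
  //   HBaseProtos.NameStringPair pair = HBaseProtos.NameStringPair.newBuilder()
  //       .setName("hbase.example.key")    // required string name = 1
  //       .setValue("example-value")       // required string value = 2
  //       .build();
  //
  //   Both required fields must be set before build(); otherwise build() throws the
  //   exception produced by newUninitializedMessageException(result), as in the
  //   Builder below.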
private NameStringPair(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private NameStringPair() { name_ = ""; value_ = ""; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private NameStringPair( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; name_ = bs; break; } case 18: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; value_ = bs; break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameStringPair_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameStringPair_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder.class); } private int bitField0_; public static final int NAME_FIELD_NUMBER = 1; private volatile java.lang.Object name_; /** * <code>required string name = 1;</code> */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { name_ = s; } return s; } } /** * <code>required string name = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return 
(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int VALUE_FIELD_NUMBER = 2; private volatile java.lang.Object value_; /** * <code>required string value = 2;</code> */ public boolean hasValue() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required string value = 2;</code> */ public java.lang.String getValue() { java.lang.Object ref = value_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { value_ = s; } return s; } } /** * <code>required string value = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getValueBytes() { java.lang.Object ref = value_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); value_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasName()) { memoizedIsInitialized = 0; return false; } if (!hasValue()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 2, value_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(2, value_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair other = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair) obj; boolean result = true; result = result && (hasName() == other.hasName()); if (hasName()) { result = result && getName() .equals(other.getName()); } result = result && (hasValue() == other.hasValue()); if (hasValue()) { result = result && getValue() .equals(other.getValue()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasName()) { hash = (37 * hash) + NAME_FIELD_NUMBER; 
hash = (53 * hash) + getName().hashCode(); } if (hasValue()) { hash = (37 * hash) + VALUE_FIELD_NUMBER; hash = (53 * hash) + getValue().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.NameStringPair} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.NameStringPair) org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameStringPair_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameStringPair_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); name_ = ""; bitField0_ = (bitField0_ & ~0x00000001); value_ = ""; bitField0_ = (bitField0_ & ~0x00000002); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameStringPair_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair build() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair result = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; 
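// buildPartial() copies each field value unconditionally and carries the builder's
// has-bits over into the message's bitField0_, so hasName()/hasValue() on the built
// message mirror what was set on the builder.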
if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.name_ = name_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.value_ = value_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()) return this; if (other.hasName()) { bitField0_ |= 0x00000001; name_ = other.name_; onChanged(); } if (other.hasValue()) { bitField0_ |= 0x00000002; value_ = other.value_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasName()) { return false; } if (!hasValue()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object name_ = ""; /** * <code>required string name = 1;</code> */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { name_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>required string 
name = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <code>required string name = 1;</code> */ public Builder setName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; name_ = value; onChanged(); return this; } /** * <code>required string name = 1;</code> */ public Builder clearName() { bitField0_ = (bitField0_ & ~0x00000001); name_ = getDefaultInstance().getName(); onChanged(); return this; } /** * <code>required string name = 1;</code> */ public Builder setNameBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; name_ = value; onChanged(); return this; } private java.lang.Object value_ = ""; /** * <code>required string value = 2;</code> */ public boolean hasValue() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required string value = 2;</code> */ public java.lang.String getValue() { java.lang.Object ref = value_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { value_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>required string value = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getValueBytes() { java.lang.Object ref = value_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); value_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <code>required string value = 2;</code> */ public Builder setValue( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; value_ = value; onChanged(); return this; } /** * <code>required string value = 2;</code> */ public Builder clearValue() { bitField0_ = (bitField0_ & ~0x00000002); value_ = getDefaultInstance().getValue(); onChanged(); return this; } /** * <code>required string value = 2;</code> */ public Builder setValueBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; value_ = value; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.NameStringPair) } // @@protoc_insertion_point(class_scope:hbase.pb.NameStringPair) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<NameStringPair> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<NameStringPair>() { public NameStringPair parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new NameStringPair(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<NameStringPair> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<NameStringPair> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface NameBytesPairOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.NameBytesPair) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>required string name = 1;</code> */ boolean hasName(); /** * <code>required string name = 1;</code> */ java.lang.String getName(); /** * <code>required string name = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getNameBytes(); /** * <code>optional bytes value = 2;</code> */ boolean hasValue(); /** * <code>optional bytes value = 2;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getValue(); } /** * Protobuf type {@code hbase.pb.NameBytesPair} */ public static final class NameBytesPair extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.NameBytesPair) NameBytesPairOrBuilder { // Use NameBytesPair.newBuilder() to construct. 
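  // A minimal usage sketch (illustrative only, not part of the generated output),
  // assuming the standard generated Builder setters declared later in this class;
  // the name and payload are hypothetical example values. The value field is raw
  // bytes, so it is set as a ByteString rather than a java.lang.String.
  //
  //   HBaseProtos.NameBytesPair attr = HBaseProtos.NameBytesPair.newBuilder()
  //       .setName("example.attribute")    // required string name = 1
  //       .setValue(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
  //           .copyFromUtf8("payload"))    // optional bytes value = 2
  //       .build();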
private NameBytesPair(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private NameBytesPair() { name_ = ""; value_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private NameBytesPair( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; name_ = bs; break; } case 18: { bitField0_ |= 0x00000002; value_ = input.readBytes(); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameBytesPair_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameBytesPair_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder.class); } private int bitField0_; public static final int NAME_FIELD_NUMBER = 1; private volatile java.lang.Object name_; /** * <code>required string name = 1;</code> */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { name_ = s; } return s; } } /** * <code>required string name = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } 
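// Note on the accessors above: name_ may hold either a java.lang.String or a
// ByteString. getName() lazily decodes a ByteString to a String and caches it only
// when the bytes are valid UTF-8; getNameBytes() does the reverse, replacing a
// cached String with its UTF-8 ByteString encoding.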
public static final int VALUE_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value_; /** * <code>optional bytes value = 2;</code> */ public boolean hasValue() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional bytes value = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getValue() { return value_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasName()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, value_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(2, value_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair other = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair) obj; boolean result = true; result = result && (hasName() == other.hasName()); if (hasName()) { result = result && getName() .equals(other.getName()); } result = result && (hasValue() == other.hasValue()); if (hasValue()) { result = result && getValue() .equals(other.getValue()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasName()) { hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); } if (hasValue()) { hash = (37 * hash) + VALUE_FIELD_NUMBER; hash = (53 * hash) + getValue().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } 
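  // ---------------------------------------------------------------------------
  // Editor's illustrative sketch (not protoc output): a minimal round trip
  // through the generated NameBytesPair API, using only the builder, the
  // toByteString() serializer inherited from GeneratedMessageV3, and the
  // parseFrom(ByteString) method defined in this class. The method name and
  // the literal field values below are hypothetical placeholders.
  // ---------------------------------------------------------------------------
  private static NameBytesPair exampleNameBytesPairRoundTrip()
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    // Build a message; name is a required string, value is an optional bytes field.
    NameBytesPair pair = newBuilder()
        .setName("example-attribute")
        .setValue(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
            .copyFromUtf8("example-value"))
        .build();
    // Serialize to the wire format and parse it back; hasValue() reports whether
    // the optional bytes field was present on the wire.
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString wire = pair.toByteString();
    NameBytesPair reparsed = parseFrom(wire);
    assert reparsed.hasValue();
    return reparsed;
  }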
public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.NameBytesPair} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.NameBytesPair) org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameBytesPair_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameBytesPair_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); name_ = ""; bitField0_ = (bitField0_ & ~0x00000001); value_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameBytesPair_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair build() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair result = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.name_ = name_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.value_ = value_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) 
{ return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) return this; if (other.hasName()) { bitField0_ |= 0x00000001; name_ = other.name_; onChanged(); } if (other.hasValue()) { setValue(other.getValue()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasName()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object name_ = ""; /** * <code>required string name = 1;</code> */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { name_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>required string name = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <code>required string name = 1;</code> */ public Builder setName( java.lang.String value) { 
if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; name_ = value; onChanged(); return this; } /** * <code>required string name = 1;</code> */ public Builder clearName() { bitField0_ = (bitField0_ & ~0x00000001); name_ = getDefaultInstance().getName(); onChanged(); return this; } /** * <code>required string name = 1;</code> */ public Builder setNameBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; name_ = value; onChanged(); return this; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>optional bytes value = 2;</code> */ public boolean hasValue() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional bytes value = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getValue() { return value_; } /** * <code>optional bytes value = 2;</code> */ public Builder setValue(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; value_ = value; onChanged(); return this; } /** * <code>optional bytes value = 2;</code> */ public Builder clearValue() { bitField0_ = (bitField0_ & ~0x00000002); value_ = getDefaultInstance().getValue(); onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.NameBytesPair) } // @@protoc_insertion_point(class_scope:hbase.pb.NameBytesPair) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<NameBytesPair> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<NameBytesPair>() { public NameBytesPair parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new NameBytesPair(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<NameBytesPair> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<NameBytesPair> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface BytesBytesPairOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.BytesBytesPair) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>required bytes first = 1;</code> */ boolean hasFirst(); /** * 
<code>required bytes first = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getFirst(); /** * <code>required bytes second = 2;</code> */ boolean hasSecond(); /** * <code>required bytes second = 2;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getSecond(); } /** * Protobuf type {@code hbase.pb.BytesBytesPair} */ public static final class BytesBytesPair extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.BytesBytesPair) BytesBytesPairOrBuilder { // Use BytesBytesPair.newBuilder() to construct. private BytesBytesPair(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private BytesBytesPair() { first_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; second_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private BytesBytesPair( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; first_ = input.readBytes(); break; } case 18: { bitField0_ |= 0x00000002; second_ = input.readBytes(); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_BytesBytesPair_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_BytesBytesPair_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder.class); } private int bitField0_; public static final int FIRST_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString first_; /** * <code>required bytes first = 1;</code> */ public boolean hasFirst() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes first = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getFirst() { return first_; } public static final int SECOND_FIELD_NUMBER = 2; private 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString second_; /** * <code>required bytes second = 2;</code> */ public boolean hasSecond() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required bytes second = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getSecond() { return second_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasFirst()) { memoizedIsInitialized = 0; return false; } if (!hasSecond()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, first_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, second_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(1, first_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(2, second_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair other = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair) obj; boolean result = true; result = result && (hasFirst() == other.hasFirst()); if (hasFirst()) { result = result && getFirst() .equals(other.getFirst()); } result = result && (hasSecond() == other.hasSecond()); if (hasSecond()) { result = result && getSecond() .equals(other.getSecond()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasFirst()) { hash = (37 * hash) + FIRST_FIELD_NUMBER; hash = (53 * hash) + getFirst().hashCode(); } if (hasSecond()) { hash = (37 * hash) + SECOND_FIELD_NUMBER; hash = (53 * hash) + getSecond().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.BytesBytesPair} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.BytesBytesPair) org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_BytesBytesPair_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_BytesBytesPair_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); first_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); second_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_BytesBytesPair_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair build() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair result = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.first_ = first_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.second_ = second_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance()) return this; if (other.hasFirst()) { setFirst(other.getFirst()); } if (other.hasSecond()) { setSecond(other.getSecond()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasFirst()) { return false; } if (!hasSecond()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString first_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes first = 1;</code> */ public boolean hasFirst() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes first = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getFirst() { return first_; } /** * <code>required bytes first = 1;</code> */ public Builder setFirst(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; first_ = value; onChanged(); return this; } /** * <code>required bytes first = 1;</code> */ public Builder clearFirst() { bitField0_ = (bitField0_ & ~0x00000001); first_ = getDefaultInstance().getFirst(); onChanged(); return this; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString second_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * 
<code>required bytes second = 2;</code> */ public boolean hasSecond() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required bytes second = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getSecond() { return second_; } /** * <code>required bytes second = 2;</code> */ public Builder setSecond(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; second_ = value; onChanged(); return this; } /** * <code>required bytes second = 2;</code> */ public Builder clearSecond() { bitField0_ = (bitField0_ & ~0x00000002); second_ = getDefaultInstance().getSecond(); onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.BytesBytesPair) } // @@protoc_insertion_point(class_scope:hbase.pb.BytesBytesPair) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<BytesBytesPair> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<BytesBytesPair>() { public BytesBytesPair parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new BytesBytesPair(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<BytesBytesPair> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<BytesBytesPair> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface NameInt64PairOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.NameInt64Pair) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>optional string name = 1;</code> */ boolean hasName(); /** * <code>optional string name = 1;</code> */ java.lang.String getName(); /** * <code>optional string name = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getNameBytes(); /** * <code>optional int64 value = 2;</code> */ boolean hasValue(); /** * <code>optional int64 value = 2;</code> */ long getValue(); } /** * Protobuf type {@code hbase.pb.NameInt64Pair} */ public static final class NameInt64Pair extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.NameInt64Pair) NameInt64PairOrBuilder { // Use NameInt64Pair.newBuilder() to construct. 
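  // ---------------------------------------------------------------------------
  // Editor's illustrative sketch (not protoc output): building a NameInt64Pair
  // counter-style entry with the generated builder. Both fields are optional in
  // the .proto definition, so hasName()/hasValue() distinguish "unset" from the
  // defaults "" and 0L. The method name and literal values are hypothetical
  // placeholders.
  // ---------------------------------------------------------------------------
  private static NameInt64Pair exampleNameInt64Pair() {
    NameInt64Pair counter = newBuilder()
        .setName("example-counter")
        .setValue(42L)
        .build();
    // Unlike NameBytesPair.name, neither field here is required, so
    // isInitialized() is always true and build() cannot fail on missing fields.
    assert counter.isInitialized() && counter.hasValue();
    return counter;
  }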
private NameInt64Pair(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private NameInt64Pair() { name_ = ""; value_ = 0L; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private NameInt64Pair( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; name_ = bs; break; } case 16: { bitField0_ |= 0x00000002; value_ = input.readInt64(); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameInt64Pair_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameInt64Pair_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair.Builder.class); } private int bitField0_; public static final int NAME_FIELD_NUMBER = 1; private volatile java.lang.Object name_; /** * <code>optional string name = 1;</code> */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { name_ = s; } return s; } } /** * <code>optional string name = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int VALUE_FIELD_NUMBER = 2; private long 
value_; /** * <code>optional int64 value = 2;</code> */ public boolean hasValue() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional int64 value = 2;</code> */ public long getValue() { return value_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeInt64(2, value_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeInt64Size(2, value_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair other = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair) obj; boolean result = true; result = result && (hasName() == other.hasName()); if (hasName()) { result = result && getName() .equals(other.getName()); } result = result && (hasValue() == other.hasValue()); if (hasValue()) { result = result && (getValue() == other.getValue()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasName()) { hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); } if (hasValue()) { hash = (37 * hash) + VALUE_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( getValue()); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair parseFrom(byte[] data) throws 
org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.NameInt64Pair} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.NameInt64Pair) org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameInt64Pair_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameInt64Pair_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); name_ = ""; bitField0_ = (bitField0_ & ~0x00000001); value_ = 0L; bitField0_ = (bitField0_ & ~0x00000002); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameInt64Pair_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair build() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair result = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.name_ = name_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.value_ = value_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public 
Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair.getDefaultInstance()) return this; if (other.hasName()) { bitField0_ |= 0x00000001; name_ = other.name_; onChanged(); } if (other.hasValue()) { setValue(other.getValue()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object name_ = ""; /** * <code>optional string name = 1;</code> */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { name_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>optional string name = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <code>optional string name = 1;</code> */ public Builder setName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; name_ = 
value; onChanged(); return this; } /** * <code>optional string name = 1;</code> */ public Builder clearName() { bitField0_ = (bitField0_ & ~0x00000001); name_ = getDefaultInstance().getName(); onChanged(); return this; } /** * <code>optional string name = 1;</code> */ public Builder setNameBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; name_ = value; onChanged(); return this; } private long value_ ; /** * <code>optional int64 value = 2;</code> */ public boolean hasValue() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional int64 value = 2;</code> */ public long getValue() { return value_; } /** * <code>optional int64 value = 2;</code> */ public Builder setValue(long value) { bitField0_ |= 0x00000002; value_ = value; onChanged(); return this; } /** * <code>optional int64 value = 2;</code> */ public Builder clearValue() { bitField0_ = (bitField0_ & ~0x00000002); value_ = 0L; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.NameInt64Pair) } // @@protoc_insertion_point(class_scope:hbase.pb.NameInt64Pair) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<NameInt64Pair> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<NameInt64Pair>() { public NameInt64Pair parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new NameInt64Pair(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<NameInt64Pair> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<NameInt64Pair> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ProcedureDescriptionOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.ProcedureDescription) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <pre> * the unique signature of the procedure * </pre> * * <code>required string signature = 1;</code> */ boolean hasSignature(); /** * <pre> * the unique signature of the procedure * </pre> * * <code>required string signature = 1;</code> */ java.lang.String getSignature(); /** * <pre> * the unique signature of the procedure * </pre> * * <code>required string signature = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getSignatureBytes(); /** * 
<pre> * the procedure instance name * </pre> * * <code>optional string instance = 2;</code> */ boolean hasInstance(); /** * <pre> * the procedure instance name * </pre> * * <code>optional string instance = 2;</code> */ java.lang.String getInstance(); /** * <pre> * the procedure instance name * </pre> * * <code>optional string instance = 2;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getInstanceBytes(); /** * <code>optional int64 creation_time = 3 [default = 0];</code> */ boolean hasCreationTime(); /** * <code>optional int64 creation_time = 3 [default = 0];</code> */ long getCreationTime(); /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList(); /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index); /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ int getConfigurationCount(); /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationOrBuilderList(); /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder( int index); } /** * <pre> ** * Description of the distributed procedure to take * </pre> * * Protobuf type {@code hbase.pb.ProcedureDescription} */ public static final class ProcedureDescription extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.ProcedureDescription) ProcedureDescriptionOrBuilder { // Use ProcedureDescription.newBuilder() to construct. 
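 // Editorial sketch, not protoc output: a minimal example of assembling a
 // ProcedureDescription through the Builder declared further down in this class.
 // setSignature/setInstance/setCreationTime/addConfiguration/build are the generated
 // methods defined below; NameStringPair's setName/setValue setters are assumed by
 // analogy with NameInt64Pair, whose builder appears earlier in this file.
 @SuppressWarnings("unused")
 private static ProcedureDescription exampleProcedureDescription() {
   return ProcedureDescription.newBuilder()
       .setSignature("online-snapshot")                // required: build() throws without it
       .setInstance("snapshot-of-my-table")            // optional instance name
       .setCreationTime(System.currentTimeMillis())
       .addConfiguration(NameStringPair.newBuilder()   // assumed NameStringPair builder API
           .setName("table")
           .setValue("my-table")
           .build())
       .build();
 }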
private ProcedureDescription(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ProcedureDescription() { signature_ = ""; instance_ = ""; creationTime_ = 0L; configuration_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ProcedureDescription( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; signature_ = bs; break; } case 18: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; instance_ = bs; break; } case 24: { bitField0_ |= 0x00000004; creationTime_ = input.readInt64(); break; } case 34: { if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) { configuration_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair>(); mutable_bitField0_ |= 0x00000008; } configuration_.add( input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.PARSER, extensionRegistry)); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) { configuration_ = java.util.Collections.unmodifiableList(configuration_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ProcedureDescription_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ProcedureDescription_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.Builder.class); } private int bitField0_; public static final int SIGNATURE_FIELD_NUMBER = 1; private volatile java.lang.Object signature_; /** * <pre> * the unique signature of the procedure * </pre> * * <code>required string signature = 1;</code> */ public boolean hasSignature() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * the unique signature of the procedure * </pre> * * <code>required string 
signature = 1;</code> */ public java.lang.String getSignature() { java.lang.Object ref = signature_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { signature_ = s; } return s; } } /** * <pre> * the unique signature of the procedure * </pre> * * <code>required string signature = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getSignatureBytes() { java.lang.Object ref = signature_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); signature_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int INSTANCE_FIELD_NUMBER = 2; private volatile java.lang.Object instance_; /** * <pre> * the procedure instance name * </pre> * * <code>optional string instance = 2;</code> */ public boolean hasInstance() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <pre> * the procedure instance name * </pre> * * <code>optional string instance = 2;</code> */ public java.lang.String getInstance() { java.lang.Object ref = instance_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { instance_ = s; } return s; } } /** * <pre> * the procedure instance name * </pre> * * <code>optional string instance = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getInstanceBytes() { java.lang.Object ref = instance_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); instance_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int CREATION_TIME_FIELD_NUMBER = 3; private long creationTime_; /** * <code>optional int64 creation_time = 3 [default = 0];</code> */ public boolean hasCreationTime() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional int64 creation_time = 3 [default = 0];</code> */ public long getCreationTime() { return creationTime_; } public static final int CONFIGURATION_FIELD_NUMBER = 4; private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair> configuration_; /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList() { return configuration_; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationOrBuilderList() { return configuration_; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public int getConfigurationCount() { return configuration_.size(); } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index) { return configuration_.get(index); } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder( int index) { return configuration_.get(index); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasSignature()) { memoizedIsInitialized = 0; return false; } for (int i = 0; i < getConfigurationCount(); i++) { if (!getConfiguration(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, signature_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 2, instance_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeInt64(3, creationTime_); } for (int i = 0; i < configuration_.size(); i++) { output.writeMessage(4, configuration_.get(i)); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(1, signature_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(2, instance_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeInt64Size(3, creationTime_); } for (int i = 0; i < configuration_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(4, configuration_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription other = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription) obj; boolean result = true; result = result && (hasSignature() == other.hasSignature()); if (hasSignature()) { result = result && getSignature() .equals(other.getSignature()); } result = result && (hasInstance() == other.hasInstance()); if (hasInstance()) { result = result && getInstance() .equals(other.getInstance()); } result = result && (hasCreationTime() == 
other.hasCreationTime()); if (hasCreationTime()) { result = result && (getCreationTime() == other.getCreationTime()); } result = result && getConfigurationList() .equals(other.getConfigurationList()); result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasSignature()) { hash = (37 * hash) + SIGNATURE_FIELD_NUMBER; hash = (53 * hash) + getSignature().hashCode(); } if (hasInstance()) { hash = (37 * hash) + INSTANCE_FIELD_NUMBER; hash = (53 * hash) + getInstance().hashCode(); } if (hasCreationTime()) { hash = (37 * hash) + CREATION_TIME_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( getCreationTime()); } if (getConfigurationCount() > 0) { hash = (37 * hash) + CONFIGURATION_FIELD_NUMBER; hash = (53 * hash) + getConfigurationList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription parseDelimitedFrom( java.io.InputStream input, 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> ** * Description of the distributed procedure to take * </pre> * * Protobuf type {@code hbase.pb.ProcedureDescription} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.ProcedureDescription) org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ProcedureDescription_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ProcedureDescription_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getConfigurationFieldBuilder(); } } public Builder clear() { super.clear(); signature_ = ""; bitField0_ = (bitField0_ & ~0x00000001); instance_ = ""; bitField0_ = (bitField0_ & ~0x00000002); creationTime_ = 0L; 
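 // (editorial note, not protoc output) resetting the 0x00000004 presence bit on the next
 // line is what makes hasCreationTime() return false again after clear()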
bitField0_ = (bitField0_ & ~0x00000004); if (configurationBuilder_ == null) { configuration_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000008); } else { configurationBuilder_.clear(); } return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ProcedureDescription_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription build() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription result = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.signature_ = signature_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.instance_ = instance_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.creationTime_ = creationTime_; if (configurationBuilder_ == null) { if (((bitField0_ & 0x00000008) == 0x00000008)) { configuration_ = java.util.Collections.unmodifiableList(configuration_); bitField0_ = (bitField0_ & ~0x00000008); } result.configuration_ = configuration_; } else { result.configuration_ = configurationBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription other) { if (other == 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance()) return this; if (other.hasSignature()) { bitField0_ |= 0x00000001; signature_ = other.signature_; onChanged(); } if (other.hasInstance()) { bitField0_ |= 0x00000002; instance_ = other.instance_; onChanged(); } if (other.hasCreationTime()) { setCreationTime(other.getCreationTime()); } if (configurationBuilder_ == null) { if (!other.configuration_.isEmpty()) { if (configuration_.isEmpty()) { configuration_ = other.configuration_; bitField0_ = (bitField0_ & ~0x00000008); } else { ensureConfigurationIsMutable(); configuration_.addAll(other.configuration_); } onChanged(); } } else { if (!other.configuration_.isEmpty()) { if (configurationBuilder_.isEmpty()) { configurationBuilder_.dispose(); configurationBuilder_ = null; configuration_ = other.configuration_; bitField0_ = (bitField0_ & ~0x00000008); configurationBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getConfigurationFieldBuilder() : null; } else { configurationBuilder_.addAllMessages(other.configuration_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasSignature()) { return false; } for (int i = 0; i < getConfigurationCount(); i++) { if (!getConfiguration(i).isInitialized()) { return false; } } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object signature_ = ""; /** * <pre> * the unique signature of the procedure * </pre> * * <code>required string signature = 1;</code> */ public boolean hasSignature() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * the unique signature of the procedure * </pre> * * <code>required string signature = 1;</code> */ public java.lang.String getSignature() { java.lang.Object ref = signature_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { signature_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> * the unique signature of the procedure * </pre> * * <code>required string signature = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getSignatureBytes() { java.lang.Object ref = signature_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); signature_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <pre> * the unique signature of the procedure * </pre> * * 
<code>required string signature = 1;</code> */ public Builder setSignature( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; signature_ = value; onChanged(); return this; } /** * <pre> * the unique signature of the procedure * </pre> * * <code>required string signature = 1;</code> */ public Builder clearSignature() { bitField0_ = (bitField0_ & ~0x00000001); signature_ = getDefaultInstance().getSignature(); onChanged(); return this; } /** * <pre> * the unique signature of the procedure * </pre> * * <code>required string signature = 1;</code> */ public Builder setSignatureBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; signature_ = value; onChanged(); return this; } private java.lang.Object instance_ = ""; /** * <pre> * the procedure instance name * </pre> * * <code>optional string instance = 2;</code> */ public boolean hasInstance() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <pre> * the procedure instance name * </pre> * * <code>optional string instance = 2;</code> */ public java.lang.String getInstance() { java.lang.Object ref = instance_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { instance_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> * the procedure instance name * </pre> * * <code>optional string instance = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getInstanceBytes() { java.lang.Object ref = instance_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); instance_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <pre> * the procedure instance name * </pre> * * <code>optional string instance = 2;</code> */ public Builder setInstance( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; instance_ = value; onChanged(); return this; } /** * <pre> * the procedure instance name * </pre> * * <code>optional string instance = 2;</code> */ public Builder clearInstance() { bitField0_ = (bitField0_ & ~0x00000002); instance_ = getDefaultInstance().getInstance(); onChanged(); return this; } /** * <pre> * the procedure instance name * </pre> * * <code>optional string instance = 2;</code> */ public Builder setInstanceBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; instance_ = value; onChanged(); return this; } private long creationTime_ ; /** * <code>optional int64 creation_time = 3 [default = 0];</code> */ public boolean hasCreationTime() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional int64 creation_time = 3 [default = 0];</code> */ public long getCreationTime() { return creationTime_; } /** * <code>optional int64 creation_time = 3 [default = 0];</code> */ public Builder setCreationTime(long value) { bitField0_ |= 0x00000004; creationTime_ = value; onChanged(); return this; } /** * <code>optional int64 creation_time = 3 [default = 0];</code> */ public Builder 
clearCreationTime() { bitField0_ = (bitField0_ & ~0x00000004); creationTime_ = 0L; onChanged(); return this; } private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair> configuration_ = java.util.Collections.emptyList(); private void ensureConfigurationIsMutable() { if (!((bitField0_ & 0x00000008) == 0x00000008)) { configuration_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair>(configuration_); bitField0_ |= 0x00000008; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> configurationBuilder_; /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList() { if (configurationBuilder_ == null) { return java.util.Collections.unmodifiableList(configuration_); } else { return configurationBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public int getConfigurationCount() { if (configurationBuilder_ == null) { return configuration_.size(); } else { return configurationBuilder_.getCount(); } } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index) { if (configurationBuilder_ == null) { return configuration_.get(index); } else { return configurationBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public Builder setConfiguration( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair value) { if (configurationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureConfigurationIsMutable(); configuration_.set(index, value); onChanged(); } else { configurationBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public Builder setConfiguration( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); configuration_.set(index, builderForValue.build()); onChanged(); } else { configurationBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public Builder addConfiguration(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair value) { if (configurationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureConfigurationIsMutable(); configuration_.add(value); onChanged(); } else { configurationBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public Builder addConfiguration( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair value) { if (configurationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureConfigurationIsMutable(); configuration_.add(index, value); onChanged(); } else { configurationBuilder_.addMessage(index, 
value); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public Builder addConfiguration( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); configuration_.add(builderForValue.build()); onChanged(); } else { configurationBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public Builder addConfiguration( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); configuration_.add(index, builderForValue.build()); onChanged(); } else { configurationBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public Builder addAllConfiguration( java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair> values) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, configuration_); onChanged(); } else { configurationBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public Builder clearConfiguration() { if (configurationBuilder_ == null) { configuration_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); } else { configurationBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public Builder removeConfiguration(int index) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); configuration_.remove(index); onChanged(); } else { configurationBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder getConfigurationBuilder( int index) { return getConfigurationFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder( int index) { if (configurationBuilder_ == null) { return configuration_.get(index); } else { return configurationBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationOrBuilderList() { if (configurationBuilder_ != null) { return configurationBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(configuration_); } } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder addConfigurationBuilder() { return getConfigurationFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()); } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder addConfigurationBuilder( int index) { return getConfigurationFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()); } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder> getConfigurationBuilderList() { return getConfigurationFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationFieldBuilder() { if (configurationBuilder_ == null) { configurationBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>( configuration_, ((bitField0_ & 0x00000008) == 0x00000008), getParentForChildren(), isClean()); configuration_ = null; } return configurationBuilder_; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.ProcedureDescription) } // @@protoc_insertion_point(class_scope:hbase.pb.ProcedureDescription) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ProcedureDescription> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<ProcedureDescription>() { public ProcedureDescription parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new ProcedureDescription(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ProcedureDescription> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ProcedureDescription> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface EmptyMsgOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.EmptyMsg) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.EmptyMsg} */ public static final class EmptyMsg extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.EmptyMsg) EmptyMsgOrBuilder { // Use EmptyMsg.newBuilder() to construct. private EmptyMsg(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private EmptyMsg() { } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private EmptyMsg( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_EmptyMsg_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_EmptyMsg_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg.Builder.class); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if 
(size != -1) return size; size = 0; size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg other = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg) obj; boolean result = true; result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.EmptyMsg} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.EmptyMsg) org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsgOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_EmptyMsg_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_EmptyMsg_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_EmptyMsg_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg build() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg result = buildPartial(); if 
(!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg result = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg(this); onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg.getDefaultInstance()) return this; this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.EmptyMsg) } // @@protoc_insertion_point(class_scope:hbase.pb.EmptyMsg) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg getDefaultInstance() { return 
DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<EmptyMsg> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<EmptyMsg>() { public EmptyMsg parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new EmptyMsg(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<EmptyMsg> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<EmptyMsg> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface LongMsgOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.LongMsg) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>required int64 long_msg = 1;</code> */ boolean hasLongMsg(); /** * <code>required int64 long_msg = 1;</code> */ long getLongMsg(); } /** * Protobuf type {@code hbase.pb.LongMsg} */ public static final class LongMsg extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.LongMsg) LongMsgOrBuilder { // Use LongMsg.newBuilder() to construct. private LongMsg(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private LongMsg() { longMsg_ = 0L; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private LongMsg( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { bitField0_ |= 0x00000001; longMsg_ = input.readInt64(); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_LongMsg_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_LongMsg_fieldAccessorTable .ensureFieldAccessorsInitialized( 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg.Builder.class); } private int bitField0_; public static final int LONG_MSG_FIELD_NUMBER = 1; private long longMsg_; /** * <code>required int64 long_msg = 1;</code> */ public boolean hasLongMsg() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required int64 long_msg = 1;</code> */ public long getLongMsg() { return longMsg_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasLongMsg()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeInt64(1, longMsg_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeInt64Size(1, longMsg_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg other = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg) obj; boolean result = true; result = result && (hasLongMsg() == other.hasLongMsg()); if (hasLongMsg()) { result = result && (getLongMsg() == other.getLongMsg()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasLongMsg()) { hash = (37 * hash) + LONG_MSG_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( getLongMsg()); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg parseFrom( byte[] data, 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.LongMsg} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.LongMsg) org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsgOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_LongMsg_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_LongMsg_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); longMsg_ = 0L; bitField0_ = (bitField0_ & ~0x00000001); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_LongMsg_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg build() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg result = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.longMsg_ = longMsg_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg.getDefaultInstance()) return this; if (other.hasLongMsg()) { setLongMsg(other.getLongMsg()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasLongMsg()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private long longMsg_ ; /** * <code>required int64 long_msg = 1;</code> */ public boolean hasLongMsg() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required int64 long_msg = 1;</code> */ public long getLongMsg() { return longMsg_; } /** * <code>required int64 long_msg = 1;</code> */ public Builder setLongMsg(long value) { bitField0_ |= 0x00000001; longMsg_ = value; onChanged(); return this; } /** * <code>required int64 long_msg = 1;</code> */ public Builder clearLongMsg() { bitField0_ = (bitField0_ & ~0x00000001); longMsg_ = 0L; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.LongMsg) } // @@protoc_insertion_point(class_scope:hbase.pb.LongMsg) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<LongMsg> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<LongMsg>() { public LongMsg parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new LongMsg(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<LongMsg> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<LongMsg> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface DoubleMsgOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.DoubleMsg) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>required double double_msg = 1;</code> */ boolean hasDoubleMsg(); /** * <code>required double double_msg = 1;</code> */ double getDoubleMsg(); } /** * Protobuf type {@code hbase.pb.DoubleMsg} */ public static final class DoubleMsg extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.DoubleMsg) DoubleMsgOrBuilder { // Use DoubleMsg.newBuilder() to construct. private DoubleMsg(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private DoubleMsg() { doubleMsg_ = 0D; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private DoubleMsg( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 9: { bitField0_ |= 0x00000001; doubleMsg_ = input.readDouble(); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_DoubleMsg_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_DoubleMsg_fieldAccessorTable .ensureFieldAccessorsInitialized( 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg.Builder.class); } private int bitField0_; public static final int DOUBLE_MSG_FIELD_NUMBER = 1; private double doubleMsg_; /** * <code>required double double_msg = 1;</code> */ public boolean hasDoubleMsg() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required double double_msg = 1;</code> */ public double getDoubleMsg() { return doubleMsg_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasDoubleMsg()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeDouble(1, doubleMsg_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeDoubleSize(1, doubleMsg_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg other = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg) obj; boolean result = true; result = result && (hasDoubleMsg() == other.hasDoubleMsg()); if (hasDoubleMsg()) { result = result && ( java.lang.Double.doubleToLongBits(getDoubleMsg()) == java.lang.Double.doubleToLongBits( other.getDoubleMsg())); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasDoubleMsg()) { hash = (37 * hash) + DOUBLE_MSG_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( java.lang.Double.doubleToLongBits(getDoubleMsg())); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } 
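/*
 * Illustrative usage sketch (hand-written note, not protoc output): the static
 * parseFrom overloads here pair with the generated Builder further below. A
 * round trip might look like the following; toByteArray() is inherited from the
 * shaded AbstractMessageLite base class, everything else is declared in this class.
 *
 *   DoubleMsg msg = DoubleMsg.newBuilder().setDoubleMsg(3.14).build();
 *   byte[] bytes = msg.toByteArray();
 *   DoubleMsg copy = DoubleMsg.parseFrom(bytes);
 *   assert copy.getDoubleMsg() == msg.getDoubleMsg();
 */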
public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.DoubleMsg} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.DoubleMsg) org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsgOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_DoubleMsg_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_DoubleMsg_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); doubleMsg_ = 0D; bitField0_ = (bitField0_ & ~0x00000001); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_DoubleMsg_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg build() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg result = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.doubleMsg_ = doubleMsg_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg.getDefaultInstance()) return this; if (other.hasDoubleMsg()) { setDoubleMsg(other.getDoubleMsg()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasDoubleMsg()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private double doubleMsg_ ; /** * <code>required double double_msg = 1;</code> */ public boolean hasDoubleMsg() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required double double_msg = 1;</code> */ public double getDoubleMsg() { return doubleMsg_; } /** * <code>required double double_msg = 1;</code> */ public Builder setDoubleMsg(double value) { bitField0_ |= 0x00000001; doubleMsg_ = value; onChanged(); return this; } /** * <code>required double double_msg = 1;</code> */ public Builder clearDoubleMsg() { bitField0_ = (bitField0_ & ~0x00000001); doubleMsg_ = 0D; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.DoubleMsg) } // @@protoc_insertion_point(class_scope:hbase.pb.DoubleMsg) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public 
static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<DoubleMsg> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<DoubleMsg>() { public DoubleMsg parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new DoubleMsg(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<DoubleMsg> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<DoubleMsg> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface BigDecimalMsgOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.BigDecimalMsg) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>required bytes bigdecimal_msg = 1;</code> */ boolean hasBigdecimalMsg(); /** * <code>required bytes bigdecimal_msg = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getBigdecimalMsg(); } /** * Protobuf type {@code hbase.pb.BigDecimalMsg} */ public static final class BigDecimalMsg extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.BigDecimalMsg) BigDecimalMsgOrBuilder { // Use BigDecimalMsg.newBuilder() to construct. private BigDecimalMsg(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private BigDecimalMsg() { bigdecimalMsg_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private BigDecimalMsg( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; bigdecimalMsg_ = input.readBytes(); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_BigDecimalMsg_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_BigDecimalMsg_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg.Builder.class); } private int bitField0_; public static final int BIGDECIMAL_MSG_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bigdecimalMsg_; /** * <code>required bytes bigdecimal_msg = 1;</code> */ public boolean hasBigdecimalMsg() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes bigdecimal_msg = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getBigdecimalMsg() { return bigdecimalMsg_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasBigdecimalMsg()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, bigdecimalMsg_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(1, bigdecimalMsg_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg other = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg) obj; boolean result = true; result = result && (hasBigdecimalMsg() == other.hasBigdecimalMsg()); if (hasBigdecimalMsg()) { result = result && getBigdecimalMsg() .equals(other.getBigdecimalMsg()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasBigdecimalMsg()) { hash = (37 * hash) + BIGDECIMAL_MSG_FIELD_NUMBER; hash = (53 * hash) + getBigdecimalMsg().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.BigDecimalMsg} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.BigDecimalMsg) org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsgOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_BigDecimalMsg_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_BigDecimalMsg_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); bigdecimalMsg_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_BigDecimalMsg_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg build() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg result = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.bigdecimalMsg_ = bigdecimalMsg_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg.getDefaultInstance()) return this; if (other.hasBigdecimalMsg()) { setBigdecimalMsg(other.getBigdecimalMsg()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasBigdecimalMsg()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bigdecimalMsg_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes bigdecimal_msg = 1;</code> */ public boolean hasBigdecimalMsg() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes bigdecimal_msg = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getBigdecimalMsg() { return bigdecimalMsg_; } /** * <code>required bytes bigdecimal_msg = 1;</code> */ public Builder setBigdecimalMsg(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; bigdecimalMsg_ = value; onChanged(); return this; } /** * <code>required bytes bigdecimal_msg = 1;</code> */ public Builder clearBigdecimalMsg() { bitField0_ = (bitField0_ & ~0x00000001); bigdecimalMsg_ = getDefaultInstance().getBigdecimalMsg(); onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet 
unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.BigDecimalMsg) } // @@protoc_insertion_point(class_scope:hbase.pb.BigDecimalMsg) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<BigDecimalMsg> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<BigDecimalMsg>() { public BigDecimalMsg parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new BigDecimalMsg(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<BigDecimalMsg> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<BigDecimalMsg> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface UUIDOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.UUID) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>required uint64 least_sig_bits = 1;</code> */ boolean hasLeastSigBits(); /** * <code>required uint64 least_sig_bits = 1;</code> */ long getLeastSigBits(); /** * <code>required uint64 most_sig_bits = 2;</code> */ boolean hasMostSigBits(); /** * <code>required uint64 most_sig_bits = 2;</code> */ long getMostSigBits(); } /** * Protobuf type {@code hbase.pb.UUID} */ public static final class UUID extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.UUID) UUIDOrBuilder { // Use UUID.newBuilder() to construct. 
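/*
 * Illustrative usage sketch (hand-written note, not protoc output): both
 * least_sig_bits and most_sig_bits are required fields, so build() fails unless
 * both are set. Assuming the builder setters follow the generated naming pattern
 * (setLeastSigBits / setMostSigBits), converting from java.util.UUID might look like:
 *
 *   java.util.UUID src = java.util.UUID.randomUUID();
 *   UUID pb = UUID.newBuilder()
 *       .setLeastSigBits(src.getLeastSignificantBits())
 *       .setMostSigBits(src.getMostSignificantBits())
 *       .build();
 */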
private UUID(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UUID() { leastSigBits_ = 0L; mostSigBits_ = 0L; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private UUID( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { bitField0_ |= 0x00000001; leastSigBits_ = input.readUInt64(); break; } case 16: { bitField0_ |= 0x00000002; mostSigBits_ = input.readUInt64(); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_UUID_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_UUID_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder.class); } private int bitField0_; public static final int LEAST_SIG_BITS_FIELD_NUMBER = 1; private long leastSigBits_; /** * <code>required uint64 least_sig_bits = 1;</code> */ public boolean hasLeastSigBits() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required uint64 least_sig_bits = 1;</code> */ public long getLeastSigBits() { return leastSigBits_; } public static final int MOST_SIG_BITS_FIELD_NUMBER = 2; private long mostSigBits_; /** * <code>required uint64 most_sig_bits = 2;</code> */ public boolean hasMostSigBits() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required uint64 most_sig_bits = 2;</code> */ public long getMostSigBits() { return mostSigBits_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasLeastSigBits()) { memoizedIsInitialized = 0; return false; } if (!hasMostSigBits()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt64(1, leastSigBits_); } if 
(((bitField0_ & 0x00000002) == 0x00000002)) { output.writeUInt64(2, mostSigBits_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt64Size(1, leastSigBits_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt64Size(2, mostSigBits_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID other = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID) obj; boolean result = true; result = result && (hasLeastSigBits() == other.hasLeastSigBits()); if (hasLeastSigBits()) { result = result && (getLeastSigBits() == other.getLeastSigBits()); } result = result && (hasMostSigBits() == other.hasMostSigBits()); if (hasMostSigBits()) { result = result && (getMostSigBits() == other.getMostSigBits()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasLeastSigBits()) { hash = (37 * hash) + LEAST_SIG_BITS_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( getLeastSigBits()); } if (hasMostSigBits()) { hash = (37 * hash) + MOST_SIG_BITS_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( getMostSigBits()); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID parseFrom(java.io.InputStream input) throws java.io.IOException { return 
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.UUID} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.UUID) org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_UUID_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_UUID_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); leastSigBits_ = 0L; bitField0_ = (bitField0_ & ~0x00000001); mostSigBits_ = 0L; bitField0_ = (bitField0_ & ~0x00000002); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_UUID_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID build() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID result = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.leastSigBits_ = leastSigBits_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.mostSigBits_ = mostSigBits_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { 
return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.getDefaultInstance()) return this; if (other.hasLeastSigBits()) { setLeastSigBits(other.getLeastSigBits()); } if (other.hasMostSigBits()) { setMostSigBits(other.getMostSigBits()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasLeastSigBits()) { return false; } if (!hasMostSigBits()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private long leastSigBits_ ; /** * <code>required uint64 least_sig_bits = 1;</code> */ public boolean hasLeastSigBits() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required uint64 least_sig_bits = 1;</code> */ public long getLeastSigBits() { return leastSigBits_; } /** * <code>required uint64 least_sig_bits = 1;</code> */ public Builder setLeastSigBits(long value) { bitField0_ |= 0x00000001; leastSigBits_ = value; onChanged(); return this; } /** * <code>required uint64 least_sig_bits = 1;</code> */ public Builder clearLeastSigBits() { bitField0_ = (bitField0_ & ~0x00000001); leastSigBits_ = 0L; onChanged(); return this; } private long mostSigBits_ ; /** * <code>required uint64 most_sig_bits = 2;</code> */ public boolean hasMostSigBits() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required uint64 most_sig_bits = 2;</code> */ public long getMostSigBits() { return mostSigBits_; } /** * <code>required uint64 most_sig_bits = 2;</code> */ public Builder setMostSigBits(long value) { bitField0_ |= 0x00000002; mostSigBits_ = value; onChanged(); return this; } /** * <code>required uint64 most_sig_bits = 2;</code> */ public Builder clearMostSigBits() { bitField0_ = (bitField0_ & ~0x00000002); mostSigBits_ = 0L; onChanged(); return this; } public final Builder 
setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.UUID) } // @@protoc_insertion_point(class_scope:hbase.pb.UUID) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<UUID> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<UUID>() { public UUID parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new UUID(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<UUID> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<UUID> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface NamespaceDescriptorOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.NamespaceDescriptor) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>required bytes name = 1;</code> */ boolean hasName(); /** * <code>required bytes name = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getName(); /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList(); /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index); /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ int getConfigurationCount(); /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationOrBuilderList(); /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder( int index); } /** * Protobuf type {@code hbase.pb.NamespaceDescriptor} */ public static final class NamespaceDescriptor extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.NamespaceDescriptor) NamespaceDescriptorOrBuilder { // Use NamespaceDescriptor.newBuilder() to construct. 
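    // Usage sketch (illustrative only, not emitted by protoc; kept in comment form so the
    // generated source is unchanged). It assumes NameStringPair, defined elsewhere in this
    // file, exposes the usual generated setName(String)/setValue(String) builder setters, and
    // the configuration key/value shown are placeholders, not real HBase settings:
    //
    //   NamespaceDescriptor ns = NamespaceDescriptor.newBuilder()
    //       .setName(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
    //           .copyFromUtf8("example_ns"))
    //       .addConfiguration(NameStringPair.newBuilder()
    //           .setName("example.property")
    //           .setValue("example-value"))
    //       .build();                                   // build() throws if required 'name' is unset
    //   byte[] wire = ns.toByteArray();                 // serialize to the protobuf wire format
    //   NamespaceDescriptor copy = NamespaceDescriptor.parseFrom(wire);
    //
    // The UUID message earlier in this file follows the same builder pattern, with
    // setMostSigBits(long)/setLeastSigBits(long) mirroring the two halves of java.util.UUID.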
private NamespaceDescriptor(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private NamespaceDescriptor() { name_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; configuration_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private NamespaceDescriptor( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; name_ = input.readBytes(); break; } case 18: { if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { configuration_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair>(); mutable_bitField0_ |= 0x00000002; } configuration_.add( input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.PARSER, extensionRegistry)); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { configuration_ = java.util.Collections.unmodifiableList(configuration_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NamespaceDescriptor_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NamespaceDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder.class); } private int bitField0_; public static final int NAME_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString name_; /** * <code>required bytes name = 1;</code> */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes name = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getName() { return name_; } public static final int CONFIGURATION_FIELD_NUMBER = 2; private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair> configuration_; /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public 
java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList() { return configuration_; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationOrBuilderList() { return configuration_; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public int getConfigurationCount() { return configuration_.size(); } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index) { return configuration_.get(index); } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder( int index) { return configuration_.get(index); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasName()) { memoizedIsInitialized = 0; return false; } for (int i = 0; i < getConfigurationCount(); i++) { if (!getConfiguration(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, name_); } for (int i = 0; i < configuration_.size(); i++) { output.writeMessage(2, configuration_.get(i)); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(1, name_); } for (int i = 0; i < configuration_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(2, configuration_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor other = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor) obj; boolean result = true; result = result && (hasName() == other.hasName()); if (hasName()) { result = result && getName() .equals(other.getName()); } result = result && getConfigurationList() .equals(other.getConfigurationList()); result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasName()) { hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); } if (getConfigurationCount() > 0) { hash = (37 * hash) + CONFIGURATION_FIELD_NUMBER; hash = (53 * hash) + getConfigurationList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = 
hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder 
newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.NamespaceDescriptor} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.NamespaceDescriptor) org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NamespaceDescriptor_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NamespaceDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getConfigurationFieldBuilder(); } } public Builder clear() { super.clear(); name_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); if (configurationBuilder_ == null) { configuration_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); } else { configurationBuilder_.clear(); } return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NamespaceDescriptor_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor build() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor result = new 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.name_ = name_; if (configurationBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002)) { configuration_ = java.util.Collections.unmodifiableList(configuration_); bitField0_ = (bitField0_ & ~0x00000002); } result.configuration_ = configuration_; } else { result.configuration_ = configurationBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance()) return this; if (other.hasName()) { setName(other.getName()); } if (configurationBuilder_ == null) { if (!other.configuration_.isEmpty()) { if (configuration_.isEmpty()) { configuration_ = other.configuration_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureConfigurationIsMutable(); configuration_.addAll(other.configuration_); } onChanged(); } } else { if (!other.configuration_.isEmpty()) { if (configurationBuilder_.isEmpty()) { configurationBuilder_.dispose(); configurationBuilder_ = null; configuration_ = other.configuration_; bitField0_ = (bitField0_ & ~0x00000002); configurationBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getConfigurationFieldBuilder() : null; } else { configurationBuilder_.addAllMessages(other.configuration_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasName()) { return false; } for (int i = 0; i < getConfigurationCount(); i++) { if (!getConfiguration(i).isInitialized()) { return false; } } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString name_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes name = 1;</code> */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes name = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getName() { return name_; } /** * <code>required bytes name = 1;</code> */ public Builder setName(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; name_ = value; onChanged(); return this; } /** * <code>required bytes name = 1;</code> */ public Builder clearName() { bitField0_ = (bitField0_ & ~0x00000001); name_ = getDefaultInstance().getName(); onChanged(); return this; } private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair> configuration_ = java.util.Collections.emptyList(); private void ensureConfigurationIsMutable() { if (!((bitField0_ & 0x00000002) == 0x00000002)) { configuration_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair>(configuration_); bitField0_ |= 0x00000002; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> configurationBuilder_; /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList() { if (configurationBuilder_ == null) { return java.util.Collections.unmodifiableList(configuration_); } else { return configurationBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public int getConfigurationCount() { if (configurationBuilder_ == null) { return configuration_.size(); } else { return configurationBuilder_.getCount(); } } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair 
getConfiguration(int index) { if (configurationBuilder_ == null) { return configuration_.get(index); } else { return configurationBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public Builder setConfiguration( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair value) { if (configurationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureConfigurationIsMutable(); configuration_.set(index, value); onChanged(); } else { configurationBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public Builder setConfiguration( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); configuration_.set(index, builderForValue.build()); onChanged(); } else { configurationBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public Builder addConfiguration(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair value) { if (configurationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureConfigurationIsMutable(); configuration_.add(value); onChanged(); } else { configurationBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public Builder addConfiguration( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair value) { if (configurationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureConfigurationIsMutable(); configuration_.add(index, value); onChanged(); } else { configurationBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public Builder addConfiguration( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); configuration_.add(builderForValue.build()); onChanged(); } else { configurationBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public Builder addConfiguration( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); configuration_.add(index, builderForValue.build()); onChanged(); } else { configurationBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public Builder addAllConfiguration( java.lang.Iterable<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair> values) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, configuration_); onChanged(); } else { configurationBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public Builder clearConfiguration() { if (configurationBuilder_ == null) { configuration_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { configurationBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public Builder removeConfiguration(int index) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); configuration_.remove(index); onChanged(); } else { configurationBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder getConfigurationBuilder( int index) { return getConfigurationFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder( int index) { if (configurationBuilder_ == null) { return configuration_.get(index); } else { return configurationBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationOrBuilderList() { if (configurationBuilder_ != null) { return configurationBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(configuration_); } } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder addConfigurationBuilder() { return getConfigurationFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()); } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder addConfigurationBuilder( int index) { return getConfigurationFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()); } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder> getConfigurationBuilderList() { return getConfigurationFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationFieldBuilder() { if (configurationBuilder_ == null) { configurationBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>( configuration_, ((bitField0_ & 0x00000002) == 0x00000002), getParentForChildren(), isClean()); configuration_ = null; } return configurationBuilder_; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.NamespaceDescriptor) } // @@protoc_insertion_point(class_scope:hbase.pb.NamespaceDescriptor) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<NamespaceDescriptor> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<NamespaceDescriptor>() { public NamespaceDescriptor parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new NamespaceDescriptor(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<NamespaceDescriptor> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<NamespaceDescriptor> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface VersionInfoOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.VersionInfo) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>required string version = 1;</code> */ boolean hasVersion(); /** * <code>required string version = 1;</code> */ java.lang.String getVersion(); /** * <code>required string version = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getVersionBytes(); /** * <code>required string url = 2;</code> */ boolean hasUrl(); /** * <code>required string url = 2;</code> */ java.lang.String getUrl(); /** * <code>required string url = 2;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getUrlBytes(); /** * <code>required string revision = 3;</code> */ boolean hasRevision(); /** * <code>required string revision = 3;</code> */ java.lang.String getRevision(); /** * <code>required string revision = 3;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getRevisionBytes(); /** * <code>required string user = 4;</code> */ boolean hasUser(); /** * <code>required string user = 4;</code> */ java.lang.String getUser(); /** * <code>required string user = 4;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getUserBytes(); /** * <code>required string date = 
5;</code> */ boolean hasDate(); /** * <code>required string date = 5;</code> */ java.lang.String getDate(); /** * <code>required string date = 5;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getDateBytes(); /** * <code>required string src_checksum = 6;</code> */ boolean hasSrcChecksum(); /** * <code>required string src_checksum = 6;</code> */ java.lang.String getSrcChecksum(); /** * <code>required string src_checksum = 6;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getSrcChecksumBytes(); /** * <code>optional uint32 version_major = 7;</code> */ boolean hasVersionMajor(); /** * <code>optional uint32 version_major = 7;</code> */ int getVersionMajor(); /** * <code>optional uint32 version_minor = 8;</code> */ boolean hasVersionMinor(); /** * <code>optional uint32 version_minor = 8;</code> */ int getVersionMinor(); } /** * <pre> * Rpc client version info proto. Included in ConnectionHeader on connection setup * </pre> * * Protobuf type {@code hbase.pb.VersionInfo} */ public static final class VersionInfo extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.VersionInfo) VersionInfoOrBuilder { // Use VersionInfo.newBuilder() to construct. private VersionInfo(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private VersionInfo() { version_ = ""; url_ = ""; revision_ = ""; user_ = ""; date_ = ""; srcChecksum_ = ""; versionMajor_ = 0; versionMinor_ = 0; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private VersionInfo( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; version_ = bs; break; } case 18: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; url_ = bs; break; } case 26: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000004; revision_ = bs; break; } case 34: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000008; user_ = bs; break; } case 42: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000010; date_ = bs; break; } case 50: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000020; srcChecksum_ = bs; break; } case 56: { bitField0_ |= 0x00000040; versionMajor_ = input.readUInt32(); break; } case 64: { bitField0_ |= 0x00000080; versionMinor_ = input.readUInt32(); break; } } } } catch 
(org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_VersionInfo_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_VersionInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.Builder.class); } private int bitField0_; public static final int VERSION_FIELD_NUMBER = 1; private volatile java.lang.Object version_; /** * <code>required string version = 1;</code> */ public boolean hasVersion() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string version = 1;</code> */ public java.lang.String getVersion() { java.lang.Object ref = version_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { version_ = s; } return s; } } /** * <code>required string version = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getVersionBytes() { java.lang.Object ref = version_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); version_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int URL_FIELD_NUMBER = 2; private volatile java.lang.Object url_; /** * <code>required string url = 2;</code> */ public boolean hasUrl() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required string url = 2;</code> */ public java.lang.String getUrl() { java.lang.Object ref = url_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { url_ = s; } return s; } } /** * <code>required string url = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getUrlBytes() { java.lang.Object ref = url_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); url_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int REVISION_FIELD_NUMBER = 3; private volatile java.lang.Object revision_; /** * <code>required string revision = 3;</code> */ public boolean hasRevision() { return ((bitField0_ & 
0x00000004) == 0x00000004); } /** * <code>required string revision = 3;</code> */ public java.lang.String getRevision() { java.lang.Object ref = revision_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { revision_ = s; } return s; } } /** * <code>required string revision = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getRevisionBytes() { java.lang.Object ref = revision_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); revision_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int USER_FIELD_NUMBER = 4; private volatile java.lang.Object user_; /** * <code>required string user = 4;</code> */ public boolean hasUser() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>required string user = 4;</code> */ public java.lang.String getUser() { java.lang.Object ref = user_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { user_ = s; } return s; } } /** * <code>required string user = 4;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getUserBytes() { java.lang.Object ref = user_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); user_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int DATE_FIELD_NUMBER = 5; private volatile java.lang.Object date_; /** * <code>required string date = 5;</code> */ public boolean hasDate() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>required string date = 5;</code> */ public java.lang.String getDate() { java.lang.Object ref = date_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { date_ = s; } return s; } } /** * <code>required string date = 5;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getDateBytes() { java.lang.Object ref = date_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); date_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int SRC_CHECKSUM_FIELD_NUMBER = 6; private volatile java.lang.Object srcChecksum_; /** * <code>required string src_checksum = 6;</code> */ public boolean hasSrcChecksum() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <code>required string src_checksum = 6;</code> */ public java.lang.String getSrcChecksum() { java.lang.Object 
ref = srcChecksum_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { srcChecksum_ = s; } return s; } } /** * <code>required string src_checksum = 6;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getSrcChecksumBytes() { java.lang.Object ref = srcChecksum_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); srcChecksum_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int VERSION_MAJOR_FIELD_NUMBER = 7; private int versionMajor_; /** * <code>optional uint32 version_major = 7;</code> */ public boolean hasVersionMajor() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** * <code>optional uint32 version_major = 7;</code> */ public int getVersionMajor() { return versionMajor_; } public static final int VERSION_MINOR_FIELD_NUMBER = 8; private int versionMinor_; /** * <code>optional uint32 version_minor = 8;</code> */ public boolean hasVersionMinor() { return ((bitField0_ & 0x00000080) == 0x00000080); } /** * <code>optional uint32 version_minor = 8;</code> */ public int getVersionMinor() { return versionMinor_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasVersion()) { memoizedIsInitialized = 0; return false; } if (!hasUrl()) { memoizedIsInitialized = 0; return false; } if (!hasRevision()) { memoizedIsInitialized = 0; return false; } if (!hasUser()) { memoizedIsInitialized = 0; return false; } if (!hasDate()) { memoizedIsInitialized = 0; return false; } if (!hasSrcChecksum()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, version_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 2, url_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 3, revision_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 4, user_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 5, date_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 6, srcChecksum_); } if (((bitField0_ & 0x00000040) == 0x00000040)) { output.writeUInt32(7, versionMajor_); } if (((bitField0_ & 0x00000080) == 0x00000080)) { output.writeUInt32(8, versionMinor_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size 
+= org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(1, version_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(2, url_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(3, revision_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(4, user_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(5, date_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(6, srcChecksum_); } if (((bitField0_ & 0x00000040) == 0x00000040)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt32Size(7, versionMajor_); } if (((bitField0_ & 0x00000080) == 0x00000080)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt32Size(8, versionMinor_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo other = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo) obj; boolean result = true; result = result && (hasVersion() == other.hasVersion()); if (hasVersion()) { result = result && getVersion() .equals(other.getVersion()); } result = result && (hasUrl() == other.hasUrl()); if (hasUrl()) { result = result && getUrl() .equals(other.getUrl()); } result = result && (hasRevision() == other.hasRevision()); if (hasRevision()) { result = result && getRevision() .equals(other.getRevision()); } result = result && (hasUser() == other.hasUser()); if (hasUser()) { result = result && getUser() .equals(other.getUser()); } result = result && (hasDate() == other.hasDate()); if (hasDate()) { result = result && getDate() .equals(other.getDate()); } result = result && (hasSrcChecksum() == other.hasSrcChecksum()); if (hasSrcChecksum()) { result = result && getSrcChecksum() .equals(other.getSrcChecksum()); } result = result && (hasVersionMajor() == other.hasVersionMajor()); if (hasVersionMajor()) { result = result && (getVersionMajor() == other.getVersionMajor()); } result = result && (hasVersionMinor() == other.hasVersionMinor()); if (hasVersionMinor()) { result = result && (getVersionMinor() == other.getVersionMinor()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasVersion()) { hash = (37 * hash) + VERSION_FIELD_NUMBER; hash = (53 * hash) + getVersion().hashCode(); } if (hasUrl()) { hash = (37 * hash) + URL_FIELD_NUMBER; hash = (53 * hash) + getUrl().hashCode(); } if (hasRevision()) { hash = (37 * hash) + REVISION_FIELD_NUMBER; hash = (53 * hash) + getRevision().hashCode(); } if (hasUser()) { hash = (37 * hash) + USER_FIELD_NUMBER; hash = 
(53 * hash) + getUser().hashCode(); } if (hasDate()) { hash = (37 * hash) + DATE_FIELD_NUMBER; hash = (53 * hash) + getDate().hashCode(); } if (hasSrcChecksum()) { hash = (37 * hash) + SRC_CHECKSUM_FIELD_NUMBER; hash = (53 * hash) + getSrcChecksum().hashCode(); } if (hasVersionMajor()) { hash = (37 * hash) + VERSION_MAJOR_FIELD_NUMBER; hash = (53 * hash) + getVersionMajor(); } if (hasVersionMinor()) { hash = (37 * hash) + VERSION_MINOR_FIELD_NUMBER; hash = (53 * hash) + getVersionMinor(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Rpc client version info proto. Included in ConnectionHeader on connection setup * </pre> * * Protobuf type {@code hbase.pb.VersionInfo} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.VersionInfo) org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfoOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_VersionInfo_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_VersionInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); version_ = ""; bitField0_ = (bitField0_ & ~0x00000001); url_ = ""; bitField0_ = (bitField0_ & ~0x00000002); revision_ = ""; bitField0_ = (bitField0_ & ~0x00000004); user_ = ""; bitField0_ = (bitField0_ & ~0x00000008); date_ = ""; bitField0_ = (bitField0_ & ~0x00000010); srcChecksum_ = ""; bitField0_ = (bitField0_ & ~0x00000020); versionMajor_ = 0; bitField0_ = (bitField0_ & ~0x00000040); versionMinor_ = 0; bitField0_ = (bitField0_ & ~0x00000080); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_VersionInfo_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo getDefaultInstanceForType() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo build() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo result = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.version_ = version_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.url_ = url_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.revision_ = revision_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; } result.user_ = user_; if (((from_bitField0_ & 0x00000010) == 0x00000010)) { to_bitField0_ |= 0x00000010; } result.date_ = date_; if (((from_bitField0_ & 0x00000020) == 0x00000020)) { to_bitField0_ |= 0x00000020; } result.srcChecksum_ = srcChecksum_; if (((from_bitField0_ & 0x00000040) == 0x00000040)) { to_bitField0_ |= 0x00000040; } result.versionMajor_ = versionMajor_; if (((from_bitField0_ & 0x00000080) == 0x00000080)) { to_bitField0_ |= 0x00000080; } result.versionMinor_ = versionMinor_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance()) return this; if (other.hasVersion()) { bitField0_ |= 0x00000001; version_ = other.version_; onChanged(); } if (other.hasUrl()) { bitField0_ |= 0x00000002; url_ = other.url_; onChanged(); } if (other.hasRevision()) { bitField0_ |= 0x00000004; revision_ = other.revision_; onChanged(); } if (other.hasUser()) { bitField0_ |= 0x00000008; user_ = other.user_; 
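// Note (not generated code): mergeFrom copies each field of 'other' that has its has-bit set --
// the string fields share other's backing Object (a String or the raw ByteString) and flip the
// corresponding bit in bitField0_ directly, while the uint32 fields further below are merged
// through setVersionMajor/setVersionMinor.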
onChanged(); } if (other.hasDate()) { bitField0_ |= 0x00000010; date_ = other.date_; onChanged(); } if (other.hasSrcChecksum()) { bitField0_ |= 0x00000020; srcChecksum_ = other.srcChecksum_; onChanged(); } if (other.hasVersionMajor()) { setVersionMajor(other.getVersionMajor()); } if (other.hasVersionMinor()) { setVersionMinor(other.getVersionMinor()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasVersion()) { return false; } if (!hasUrl()) { return false; } if (!hasRevision()) { return false; } if (!hasUser()) { return false; } if (!hasDate()) { return false; } if (!hasSrcChecksum()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object version_ = ""; /** * <code>required string version = 1;</code> */ public boolean hasVersion() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string version = 1;</code> */ public java.lang.String getVersion() { java.lang.Object ref = version_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { version_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>required string version = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getVersionBytes() { java.lang.Object ref = version_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); version_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <code>required string version = 1;</code> */ public Builder setVersion( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; version_ = value; onChanged(); return this; } /** * <code>required string version = 1;</code> */ public Builder clearVersion() { bitField0_ = (bitField0_ & ~0x00000001); version_ = getDefaultInstance().getVersion(); onChanged(); return this; } /** * <code>required string version = 1;</code> */ public Builder setVersionBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; version_ = value; onChanged(); return this; } private java.lang.Object url_ = ""; /** * <code>required string url = 2;</code> */ public boolean hasUrl() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required string url = 2;</code> */ public java.lang.String getUrl() { java.lang.Object ref = url_; if (!(ref instanceof 
java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { url_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>required string url = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getUrlBytes() { java.lang.Object ref = url_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); url_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <code>required string url = 2;</code> */ public Builder setUrl( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; url_ = value; onChanged(); return this; } /** * <code>required string url = 2;</code> */ public Builder clearUrl() { bitField0_ = (bitField0_ & ~0x00000002); url_ = getDefaultInstance().getUrl(); onChanged(); return this; } /** * <code>required string url = 2;</code> */ public Builder setUrlBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; url_ = value; onChanged(); return this; } private java.lang.Object revision_ = ""; /** * <code>required string revision = 3;</code> */ public boolean hasRevision() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>required string revision = 3;</code> */ public java.lang.String getRevision() { java.lang.Object ref = revision_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { revision_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>required string revision = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getRevisionBytes() { java.lang.Object ref = revision_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); revision_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <code>required string revision = 3;</code> */ public Builder setRevision( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; revision_ = value; onChanged(); return this; } /** * <code>required string revision = 3;</code> */ public Builder clearRevision() { bitField0_ = (bitField0_ & ~0x00000004); revision_ = getDefaultInstance().getRevision(); onChanged(); return this; } /** * <code>required string revision = 3;</code> */ public Builder setRevisionBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; revision_ = value; onChanged(); return this; } private java.lang.Object user_ = ""; /** * <code>required string user = 4;</code> */ public boolean hasUser() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>required string user = 4;</code> */ public java.lang.String getUser() { java.lang.Object ref = user_; if (!(ref instanceof 
java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { user_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>required string user = 4;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getUserBytes() { java.lang.Object ref = user_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); user_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <code>required string user = 4;</code> */ public Builder setUser( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000008; user_ = value; onChanged(); return this; } /** * <code>required string user = 4;</code> */ public Builder clearUser() { bitField0_ = (bitField0_ & ~0x00000008); user_ = getDefaultInstance().getUser(); onChanged(); return this; } /** * <code>required string user = 4;</code> */ public Builder setUserBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000008; user_ = value; onChanged(); return this; } private java.lang.Object date_ = ""; /** * <code>required string date = 5;</code> */ public boolean hasDate() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>required string date = 5;</code> */ public java.lang.String getDate() { java.lang.Object ref = date_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { date_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>required string date = 5;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getDateBytes() { java.lang.Object ref = date_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); date_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <code>required string date = 5;</code> */ public Builder setDate( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000010; date_ = value; onChanged(); return this; } /** * <code>required string date = 5;</code> */ public Builder clearDate() { bitField0_ = (bitField0_ & ~0x00000010); date_ = getDefaultInstance().getDate(); onChanged(); return this; } /** * <code>required string date = 5;</code> */ public Builder setDateBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000010; date_ = value; onChanged(); return this; } private java.lang.Object srcChecksum_ = ""; /** * <code>required string src_checksum = 6;</code> */ public boolean hasSrcChecksum() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <code>required string src_checksum = 6;</code> */ public java.lang.String getSrcChecksum() { java.lang.Object ref = srcChecksum_; if (!(ref instanceof java.lang.String)) { 
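// Note (not generated code): string-typed fields are stored either as a java.lang.String or as
// the raw ByteString read off the wire. The branch below decodes the ByteString with
// toStringUtf8() and, when the bytes are valid UTF-8, caches the decoded String back into
// srcChecksum_ so later calls return it without re-decoding; the other string accessors in this
// builder follow the same pattern.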
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { srcChecksum_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>required string src_checksum = 6;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getSrcChecksumBytes() { java.lang.Object ref = srcChecksum_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); srcChecksum_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <code>required string src_checksum = 6;</code> */ public Builder setSrcChecksum( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000020; srcChecksum_ = value; onChanged(); return this; } /** * <code>required string src_checksum = 6;</code> */ public Builder clearSrcChecksum() { bitField0_ = (bitField0_ & ~0x00000020); srcChecksum_ = getDefaultInstance().getSrcChecksum(); onChanged(); return this; } /** * <code>required string src_checksum = 6;</code> */ public Builder setSrcChecksumBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000020; srcChecksum_ = value; onChanged(); return this; } private int versionMajor_ ; /** * <code>optional uint32 version_major = 7;</code> */ public boolean hasVersionMajor() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** * <code>optional uint32 version_major = 7;</code> */ public int getVersionMajor() { return versionMajor_; } /** * <code>optional uint32 version_major = 7;</code> */ public Builder setVersionMajor(int value) { bitField0_ |= 0x00000040; versionMajor_ = value; onChanged(); return this; } /** * <code>optional uint32 version_major = 7;</code> */ public Builder clearVersionMajor() { bitField0_ = (bitField0_ & ~0x00000040); versionMajor_ = 0; onChanged(); return this; } private int versionMinor_ ; /** * <code>optional uint32 version_minor = 8;</code> */ public boolean hasVersionMinor() { return ((bitField0_ & 0x00000080) == 0x00000080); } /** * <code>optional uint32 version_minor = 8;</code> */ public int getVersionMinor() { return versionMinor_; } /** * <code>optional uint32 version_minor = 8;</code> */ public Builder setVersionMinor(int value) { bitField0_ |= 0x00000080; versionMinor_ = value; onChanged(); return this; } /** * <code>optional uint32 version_minor = 8;</code> */ public Builder clearVersionMinor() { bitField0_ = (bitField0_ & ~0x00000080); versionMinor_ = 0; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.VersionInfo) } // @@protoc_insertion_point(class_scope:hbase.pb.VersionInfo) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo(); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<VersionInfo> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<VersionInfo>() { public VersionInfo parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new VersionInfo(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<VersionInfo> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<VersionInfo> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface RegionServerInfoOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.RegionServerInfo) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>optional int32 infoPort = 1;</code> */ boolean hasInfoPort(); /** * <code>optional int32 infoPort = 1;</code> */ int getInfoPort(); /** * <code>optional .hbase.pb.VersionInfo version_info = 2;</code> */ boolean hasVersionInfo(); /** * <code>optional .hbase.pb.VersionInfo version_info = 2;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo getVersionInfo(); /** * <code>optional .hbase.pb.VersionInfo version_info = 2;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfoOrBuilder getVersionInfoOrBuilder(); } /** * <pre> ** * Description of the region server info * </pre> * * Protobuf type {@code hbase.pb.RegionServerInfo} */ public static final class RegionServerInfo extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.RegionServerInfo) RegionServerInfoOrBuilder { // Use RegionServerInfo.newBuilder() to construct. 
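// Illustrative sketch (not generated code): a caller could assemble a RegionServerInfo with its
// nested VersionInfo through the builders defined in this file. VersionInfo declares version,
// url, revision, user, date and src_checksum as required fields, so build() throws if any of
// them is unset; the literal values below are placeholders, not real build metadata.
//
//   HBaseProtos.VersionInfo versionInfo = HBaseProtos.VersionInfo.newBuilder()
//       .setVersion("2.0.0")
//       .setUrl("git://example/hbase")
//       .setRevision("deadbeef")
//       .setUser("builder")
//       .setDate("Thu Jan  1 00:00:00 UTC 1970")
//       .setSrcChecksum("0")
//       .setVersionMajor(2)
//       .setVersionMinor(0)
//       .build();
//   HBaseProtos.RegionServerInfo rsInfo = HBaseProtos.RegionServerInfo.newBuilder()
//       .setInfoPort(16030)
//       .setVersionInfo(versionInfo)
//       .build();
//   byte[] wire = rsInfo.toByteArray();
//   HBaseProtos.RegionServerInfo roundTripped = HBaseProtos.RegionServerInfo.parseFrom(wire);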
private RegionServerInfo(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private RegionServerInfo() { infoPort_ = 0; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private RegionServerInfo( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { bitField0_ |= 0x00000001; infoPort_ = input.readInt32(); break; } case 18: { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.Builder subBuilder = null; if (((bitField0_ & 0x00000002) == 0x00000002)) { subBuilder = versionInfo_.toBuilder(); } versionInfo_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(versionInfo_); versionInfo_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionServerInfo_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionServerInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo.Builder.class); } private int bitField0_; public static final int INFOPORT_FIELD_NUMBER = 1; private int infoPort_; /** * <code>optional int32 infoPort = 1;</code> */ public boolean hasInfoPort() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional int32 infoPort = 1;</code> */ public int getInfoPort() { return infoPort_; } public static final int VERSION_INFO_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo versionInfo_; /** * <code>optional .hbase.pb.VersionInfo version_info = 2;</code> */ public boolean hasVersionInfo() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional .hbase.pb.VersionInfo version_info = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo getVersionInfo() { return versionInfo_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance() : versionInfo_; } /** * <code>optional .hbase.pb.VersionInfo version_info = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfoOrBuilder getVersionInfoOrBuilder() { return versionInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance() : versionInfo_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (hasVersionInfo()) { if (!getVersionInfo().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeInt32(1, infoPort_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeMessage(2, getVersionInfo()); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeInt32Size(1, infoPort_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(2, getVersionInfo()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo other = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo) obj; boolean result = true; result = result && (hasInfoPort() == other.hasInfoPort()); if (hasInfoPort()) { result = result && (getInfoPort() == other.getInfoPort()); } result = result && (hasVersionInfo() == other.hasVersionInfo()); if (hasVersionInfo()) { result = result && getVersionInfo() .equals(other.getVersionInfo()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasInfoPort()) { hash = (37 * hash) + INFOPORT_FIELD_NUMBER; hash = (53 * hash) + getInfoPort(); } if (hasVersionInfo()) { hash = (37 * hash) + VERSION_INFO_FIELD_NUMBER; hash = (53 * hash) + getVersionInfo().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> ** * Description of the region server info * </pre> * * Protobuf type {@code hbase.pb.RegionServerInfo} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.RegionServerInfo) org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfoOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionServerInfo_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionServerInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getVersionInfoFieldBuilder(); } } public Builder clear() { super.clear(); infoPort_ = 0; bitField0_ = (bitField0_ & ~0x00000001); if (versionInfoBuilder_ == null) { versionInfo_ = null; } else { versionInfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionServerInfo_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo build() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo result = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.infoPort_ = infoPort_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } if (versionInfoBuilder_ == null) { result.versionInfo_ = versionInfo_; } else { result.versionInfo_ = 
versionInfoBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo.getDefaultInstance()) return this; if (other.hasInfoPort()) { setInfoPort(other.getInfoPort()); } if (other.hasVersionInfo()) { mergeVersionInfo(other.getVersionInfo()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (hasVersionInfo()) { if (!getVersionInfo().isInitialized()) { return false; } } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private int infoPort_ ; /** * <code>optional int32 infoPort = 1;</code> */ public boolean hasInfoPort() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional int32 infoPort = 1;</code> */ public int getInfoPort() { return infoPort_; } /** * <code>optional int32 infoPort = 1;</code> */ public Builder setInfoPort(int value) { bitField0_ |= 0x00000001; infoPort_ = value; onChanged(); return this; } /** * <code>optional int32 infoPort = 1;</code> */ public Builder clearInfoPort() { bitField0_ = (bitField0_ & ~0x00000001); infoPort_ = 0; onChanged(); return this; } private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo versionInfo_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfoOrBuilder> versionInfoBuilder_; /** * <code>optional .hbase.pb.VersionInfo version_info = 2;</code> */ public boolean hasVersionInfo() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional .hbase.pb.VersionInfo version_info = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo getVersionInfo() { if (versionInfoBuilder_ == null) { return versionInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance() : versionInfo_; } else { return versionInfoBuilder_.getMessage(); } } /** * <code>optional .hbase.pb.VersionInfo version_info = 2;</code> */ public Builder setVersionInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo value) { if (versionInfoBuilder_ == null) { if (value == null) { throw new NullPointerException(); } versionInfo_ = value; onChanged(); } else { versionInfoBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .hbase.pb.VersionInfo version_info = 2;</code> */ public Builder setVersionInfo( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.Builder builderForValue) { if (versionInfoBuilder_ == null) { versionInfo_ = builderForValue.build(); onChanged(); } else { versionInfoBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .hbase.pb.VersionInfo version_info = 2;</code> */ public Builder mergeVersionInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo value) { if (versionInfoBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && versionInfo_ != null && versionInfo_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance()) { versionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.newBuilder(versionInfo_).mergeFrom(value).buildPartial(); } else { versionInfo_ = value; } onChanged(); } else { versionInfoBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .hbase.pb.VersionInfo version_info = 2;</code> */ public Builder clearVersionInfo() { if (versionInfoBuilder_ == null) { versionInfo_ = null; onChanged(); } else { versionInfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } /** * <code>optional .hbase.pb.VersionInfo version_info = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.Builder getVersionInfoBuilder() { bitField0_ |= 0x00000002; onChanged(); return getVersionInfoFieldBuilder().getBuilder(); } /** * <code>optional .hbase.pb.VersionInfo version_info = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfoOrBuilder getVersionInfoOrBuilder() { if (versionInfoBuilder_ != null) { return versionInfoBuilder_.getMessageOrBuilder(); } else { return versionInfo_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance() : versionInfo_; } } /** * <code>optional .hbase.pb.VersionInfo version_info = 2;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfoOrBuilder> getVersionInfoFieldBuilder() { if (versionInfoBuilder_ == null) { versionInfoBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfoOrBuilder>( getVersionInfo(), getParentForChildren(), isClean()); versionInfo_ = null; } return versionInfoBuilder_; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.RegionServerInfo) } // @@protoc_insertion_point(class_scope:hbase.pb.RegionServerInfo) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<RegionServerInfo> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<RegionServerInfo>() { public RegionServerInfo parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new RegionServerInfo(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<RegionServerInfo> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<RegionServerInfo> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_TableName_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_TableName_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_TableSchema_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_TableSchema_fieldAccessorTable; private static final 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_TableState_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_TableState_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ColumnFamilySchema_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ColumnFamilySchema_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RegionInfo_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_RegionInfo_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_FavoredNodes_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_FavoredNodes_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RegionSpecifier_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_RegionSpecifier_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_TimeRange_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_TimeRange_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ColumnFamilyTimeRange_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ColumnFamilyTimeRange_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ServerName_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ServerName_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_Coprocessor_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_Coprocessor_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_NameStringPair_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_NameStringPair_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_NameBytesPair_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_NameBytesPair_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor 
internal_static_hbase_pb_BytesBytesPair_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_BytesBytesPair_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_NameInt64Pair_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_NameInt64Pair_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ProcedureDescription_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ProcedureDescription_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_EmptyMsg_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_EmptyMsg_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_LongMsg_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_LongMsg_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_DoubleMsg_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_DoubleMsg_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_BigDecimalMsg_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_BigDecimalMsg_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_UUID_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_UUID_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_NamespaceDescriptor_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_NamespaceDescriptor_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_VersionInfo_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_VersionInfo_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RegionServerInfo_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_RegionServerInfo_fieldAccessorTable; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n\013HBase.proto\022\010hbase.pb\"1\n\tTableName\022\021\n\t" + "namespace\030\001 \002(\014\022\021\n\tqualifier\030\002 \002(\014\"\314\001\n\013T" + "ableSchema\022\'\n\ntable_name\030\001 \001(\0132\023.hbase.p" + "b.TableName\022,\n\nattributes\030\002 \003(\0132\030.hbase." + "pb.BytesBytesPair\0225\n\017column_families\030\003 \003" + "(\0132\034.hbase.pb.ColumnFamilySchema\022/\n\rconf" + "iguration\030\004 \003(\0132\030.hbase.pb.NameStringPai" + "r\"x\n\nTableState\022)\n\005state\030\001 \002(\0162\032.hbase.p" + "b.TableState.State\"?\n\005State\022\013\n\007ENABLED\020\000" + "\022\014\n\010DISABLED\020\001\022\r\n\tDISABLING\020\002\022\014\n\010ENABLIN", "G\020\003\"\201\001\n\022ColumnFamilySchema\022\014\n\004name\030\001 \002(\014" + "\022,\n\nattributes\030\002 \003(\0132\030.hbase.pb.BytesByt" + "esPair\022/\n\rconfiguration\030\003 \003(\0132\030.hbase.pb" + ".NameStringPair\"\243\001\n\nRegionInfo\022\021\n\tregion" + "_id\030\001 \002(\004\022\'\n\ntable_name\030\002 \002(\0132\023.hbase.pb" + ".TableName\022\021\n\tstart_key\030\003 \001(\014\022\017\n\007end_key" + "\030\004 \001(\014\022\017\n\007offline\030\005 \001(\010\022\r\n\005split\030\006 \001(\010\022\025" + "\n\nreplica_id\030\007 \001(\005:\0010\":\n\014FavoredNodes\022*\n" + "\014favored_node\030\001 \003(\0132\024.hbase.pb.ServerNam" + "e\"\236\001\n\017RegionSpecifier\022;\n\004type\030\001 \002(\0162-.hb", "ase.pb.RegionSpecifier.RegionSpecifierTy" + "pe\022\r\n\005value\030\002 \002(\014\"?\n\023RegionSpecifierType" + "\022\017\n\013REGION_NAME\020\001\022\027\n\023ENCODED_REGION_NAME" + "\020\002\"%\n\tTimeRange\022\014\n\004from\030\001 \001(\004\022\n\n\002to\030\002 \001(" + "\004\"W\n\025ColumnFamilyTimeRange\022\025\n\rcolumn_fam" + "ily\030\001 \002(\014\022\'\n\ntime_range\030\002 \002(\0132\023.hbase.pb" + ".TimeRange\"A\n\nServerName\022\021\n\thost_name\030\001 " + "\002(\t\022\014\n\004port\030\002 \001(\r\022\022\n\nstart_code\030\003 \001(\004\"\033\n" + "\013Coprocessor\022\014\n\004name\030\001 \002(\t\"-\n\016NameString" + "Pair\022\014\n\004name\030\001 \002(\t\022\r\n\005value\030\002 \002(\t\",\n\rNam", "eBytesPair\022\014\n\004name\030\001 \002(\t\022\r\n\005value\030\002 \001(\014\"" + "/\n\016BytesBytesPair\022\r\n\005first\030\001 \002(\014\022\016\n\006seco" + "nd\030\002 \002(\014\",\n\rNameInt64Pair\022\014\n\004name\030\001 \001(\t\022" + "\r\n\005value\030\002 \001(\003\"\206\001\n\024ProcedureDescription\022" + "\021\n\tsignature\030\001 \002(\t\022\020\n\010instance\030\002 \001(\t\022\030\n\r" + "creation_time\030\003 \001(\003:\0010\022/\n\rconfiguration\030" + "\004 \003(\0132\030.hbase.pb.NameStringPair\"\n\n\010Empty" + "Msg\"\033\n\007LongMsg\022\020\n\010long_msg\030\001 \002(\003\"\037\n\tDoub" + "leMsg\022\022\n\ndouble_msg\030\001 \002(\001\"\'\n\rBigDecimalM" + "sg\022\026\n\016bigdecimal_msg\030\001 \002(\014\"5\n\004UUID\022\026\n\016le", "ast_sig_bits\030\001 \002(\004\022\025\n\rmost_sig_bits\030\002 \002(" + "\004\"T\n\023NamespaceDescriptor\022\014\n\004name\030\001 \002(\014\022/" + "\n\rconfiguration\030\002 \003(\0132\030.hbase.pb.NameStr" + "ingPair\"\235\001\n\013VersionInfo\022\017\n\007version\030\001 \002(\t" + "\022\013\n\003url\030\002 \002(\t\022\020\n\010revision\030\003 \002(\t\022\014\n\004user\030" + "\004 \002(\t\022\014\n\004date\030\005 \002(\t\022\024\n\014src_checksum\030\006 \002(" + 
"\t\022\025\n\rversion_major\030\007 \001(\r\022\025\n\rversion_mino" + "r\030\010 \001(\r\"Q\n\020RegionServerInfo\022\020\n\010infoPort\030" + "\001 \001(\005\022+\n\014version_info\030\002 \001(\0132\025.hbase.pb.V" + "ersionInfo*r\n\013CompareType\022\010\n\004LESS\020\000\022\021\n\rL", "ESS_OR_EQUAL\020\001\022\t\n\005EQUAL\020\002\022\r\n\tNOT_EQUAL\020\003" + "\022\024\n\020GREATER_OR_EQUAL\020\004\022\013\n\007GREATER\020\005\022\t\n\005N" + "O_OP\020\006*n\n\010TimeUnit\022\017\n\013NANOSECONDS\020\001\022\020\n\014M" + "ICROSECONDS\020\002\022\020\n\014MILLISECONDS\020\003\022\013\n\007SECON" + "DS\020\004\022\013\n\007MINUTES\020\005\022\t\n\005HOURS\020\006\022\010\n\004DAYS\020\007BE" + "\n1org.apache.hadoop.hbase.shaded.protobu" + "f.generatedB\013HBaseProtosH\001\240\001\001" }; org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { public org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry assignDescriptors( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor root) { descriptor = root; return null; } }; org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor[] { }, assigner); internal_static_hbase_pb_TableName_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_hbase_pb_TableName_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_TableName_descriptor, new java.lang.String[] { "Namespace", "Qualifier", }); internal_static_hbase_pb_TableSchema_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_hbase_pb_TableSchema_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_TableSchema_descriptor, new java.lang.String[] { "TableName", "Attributes", "ColumnFamilies", "Configuration", }); internal_static_hbase_pb_TableState_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_hbase_pb_TableState_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_TableState_descriptor, new java.lang.String[] { "State", }); internal_static_hbase_pb_ColumnFamilySchema_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_hbase_pb_ColumnFamilySchema_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_ColumnFamilySchema_descriptor, new java.lang.String[] { "Name", "Attributes", "Configuration", }); internal_static_hbase_pb_RegionInfo_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_hbase_pb_RegionInfo_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_RegionInfo_descriptor, new java.lang.String[] { "RegionId", "TableName", "StartKey", "EndKey", "Offline", "Split", "ReplicaId", }); internal_static_hbase_pb_FavoredNodes_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_hbase_pb_FavoredNodes_fieldAccessorTable = new 
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_FavoredNodes_descriptor, new java.lang.String[] { "FavoredNode", }); internal_static_hbase_pb_RegionSpecifier_descriptor = getDescriptor().getMessageTypes().get(6); internal_static_hbase_pb_RegionSpecifier_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_RegionSpecifier_descriptor, new java.lang.String[] { "Type", "Value", }); internal_static_hbase_pb_TimeRange_descriptor = getDescriptor().getMessageTypes().get(7); internal_static_hbase_pb_TimeRange_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_TimeRange_descriptor, new java.lang.String[] { "From", "To", }); internal_static_hbase_pb_ColumnFamilyTimeRange_descriptor = getDescriptor().getMessageTypes().get(8); internal_static_hbase_pb_ColumnFamilyTimeRange_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_ColumnFamilyTimeRange_descriptor, new java.lang.String[] { "ColumnFamily", "TimeRange", }); internal_static_hbase_pb_ServerName_descriptor = getDescriptor().getMessageTypes().get(9); internal_static_hbase_pb_ServerName_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_ServerName_descriptor, new java.lang.String[] { "HostName", "Port", "StartCode", }); internal_static_hbase_pb_Coprocessor_descriptor = getDescriptor().getMessageTypes().get(10); internal_static_hbase_pb_Coprocessor_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_Coprocessor_descriptor, new java.lang.String[] { "Name", }); internal_static_hbase_pb_NameStringPair_descriptor = getDescriptor().getMessageTypes().get(11); internal_static_hbase_pb_NameStringPair_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_NameStringPair_descriptor, new java.lang.String[] { "Name", "Value", }); internal_static_hbase_pb_NameBytesPair_descriptor = getDescriptor().getMessageTypes().get(12); internal_static_hbase_pb_NameBytesPair_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_NameBytesPair_descriptor, new java.lang.String[] { "Name", "Value", }); internal_static_hbase_pb_BytesBytesPair_descriptor = getDescriptor().getMessageTypes().get(13); internal_static_hbase_pb_BytesBytesPair_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_BytesBytesPair_descriptor, new java.lang.String[] { "First", "Second", }); internal_static_hbase_pb_NameInt64Pair_descriptor = getDescriptor().getMessageTypes().get(14); internal_static_hbase_pb_NameInt64Pair_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_NameInt64Pair_descriptor, new java.lang.String[] { "Name", "Value", }); internal_static_hbase_pb_ProcedureDescription_descriptor = getDescriptor().getMessageTypes().get(15); internal_static_hbase_pb_ProcedureDescription_fieldAccessorTable = new 
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_ProcedureDescription_descriptor, new java.lang.String[] { "Signature", "Instance", "CreationTime", "Configuration", }); internal_static_hbase_pb_EmptyMsg_descriptor = getDescriptor().getMessageTypes().get(16); internal_static_hbase_pb_EmptyMsg_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_EmptyMsg_descriptor, new java.lang.String[] { }); internal_static_hbase_pb_LongMsg_descriptor = getDescriptor().getMessageTypes().get(17); internal_static_hbase_pb_LongMsg_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_LongMsg_descriptor, new java.lang.String[] { "LongMsg", }); internal_static_hbase_pb_DoubleMsg_descriptor = getDescriptor().getMessageTypes().get(18); internal_static_hbase_pb_DoubleMsg_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_DoubleMsg_descriptor, new java.lang.String[] { "DoubleMsg", }); internal_static_hbase_pb_BigDecimalMsg_descriptor = getDescriptor().getMessageTypes().get(19); internal_static_hbase_pb_BigDecimalMsg_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_BigDecimalMsg_descriptor, new java.lang.String[] { "BigdecimalMsg", }); internal_static_hbase_pb_UUID_descriptor = getDescriptor().getMessageTypes().get(20); internal_static_hbase_pb_UUID_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_UUID_descriptor, new java.lang.String[] { "LeastSigBits", "MostSigBits", }); internal_static_hbase_pb_NamespaceDescriptor_descriptor = getDescriptor().getMessageTypes().get(21); internal_static_hbase_pb_NamespaceDescriptor_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_NamespaceDescriptor_descriptor, new java.lang.String[] { "Name", "Configuration", }); internal_static_hbase_pb_VersionInfo_descriptor = getDescriptor().getMessageTypes().get(22); internal_static_hbase_pb_VersionInfo_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_VersionInfo_descriptor, new java.lang.String[] { "Version", "Url", "Revision", "User", "Date", "SrcChecksum", "VersionMajor", "VersionMinor", }); internal_static_hbase_pb_RegionServerInfo_descriptor = getDescriptor().getMessageTypes().get(23); internal_static_hbase_pb_RegionServerInfo_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_RegionServerInfo_descriptor, new java.lang.String[] { "InfoPort", "VersionInfo", }); } // @@protoc_insertion_point(outer_class_scope) }
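/*
 * The static initializer above rebuilds the HBase.proto FileDescriptor from the serialized
 * bytes in descriptorData and then constructs a GeneratedMessageV3.FieldAccessorTable for
 * each message type (TableName, TableSchema, TableState, ColumnFamilySchema, RegionInfo,
 * ..., RegionServerInfo). The generated message classes elsewhere in this file rely on those
 * tables for their reflection-based field access.
 *
 * A minimal sketch of how calling code can walk the descriptor, using only the
 * getDescriptor() accessor defined above and the shaded protobuf Descriptors API:
 *
 *   for (org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor d
 *       : HBaseProtos.getDescriptor().getMessageTypes()) {
 *     System.out.println(d.getFullName());                      // e.g. "hbase.pb.TableName"
 *     for (org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor f
 *         : d.getFields()) {
 *       System.out.println("  " + f.getNumber() + ": " + f.getName() + " (" + f.getType() + ")");
 *     }
 *   }
 *
 * Message construction goes through the protoc-generated builders. The builder and setter
 * names below follow the standard protoc naming convention for the namespace/qualifier
 * fields declared in HBase.proto; they are not shown in this excerpt and are assumed here
 * purely for illustration:
 *
 *   org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString ns =
 *       org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8("default");
 *   org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString table =
 *       org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8("my_table");
 *   HBaseProtos.TableName tn = HBaseProtos.TableName.newBuilder()
 *       .setNamespace(ns)
 *       .setQualifier(table)
 *       .build();
 */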