// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: HBase.proto

package org.apache.hadoop.hbase.protobuf.generated;

public final class HBaseProtos {
  private HBaseProtos() {}
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  /**
   * Protobuf enum {@code hbase.pb.CompareType}
   *
   * <pre>
   * Comparison operators
   * </pre>
   */
  public enum CompareType
      implements com.google.protobuf.ProtocolMessageEnum {
    /**
     * <code>LESS = 0;</code>
     */
    LESS(0, 0),
    /**
     * <code>LESS_OR_EQUAL = 1;</code>
     */
    LESS_OR_EQUAL(1, 1),
    /**
     * <code>EQUAL = 2;</code>
     */
    EQUAL(2, 2),
    /**
     * <code>NOT_EQUAL = 3;</code>
     */
    NOT_EQUAL(3, 3),
    /**
     * <code>GREATER_OR_EQUAL = 4;</code>
     */
    GREATER_OR_EQUAL(4, 4),
    /**
     * <code>GREATER = 5;</code>
     */
    GREATER(5, 5),
    /**
     * <code>NO_OP = 6;</code>
     */
    NO_OP(6, 6),
    ;

    /**
     * <code>LESS = 0;</code>
     */
    public static final int LESS_VALUE = 0;
    /**
     * <code>LESS_OR_EQUAL = 1;</code>
     */
    public static final int LESS_OR_EQUAL_VALUE = 1;
    /**
     * <code>EQUAL = 2;</code>
     */
    public static final int EQUAL_VALUE = 2;
    /**
     * <code>NOT_EQUAL = 3;</code>
     */
    public static final int NOT_EQUAL_VALUE = 3;
    /**
     * <code>GREATER_OR_EQUAL = 4;</code>
     */
    public static final int GREATER_OR_EQUAL_VALUE = 4;
    /**
     * <code>GREATER = 5;</code>
     */
    public static final int GREATER_VALUE = 5;
    /**
     * <code>NO_OP = 6;</code>
     */
    public static final int NO_OP_VALUE = 6;


    public final int getNumber() { return value; }

    public static CompareType valueOf(int value) {
      switch (value) {
        case 0: return LESS;
        case 1: return LESS_OR_EQUAL;
        case 2: return EQUAL;
        case 3: return NOT_EQUAL;
        case 4: return GREATER_OR_EQUAL;
        case 5: return GREATER;
        case 6: return NO_OP;
        default: return null;
      }
    }

    public static com.google.protobuf.Internal.EnumLiteMap<CompareType>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static com.google.protobuf.Internal.EnumLiteMap<CompareType>
        internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<CompareType>() {
            public CompareType findValueByNumber(int number) {
              return CompareType.valueOf(number);
            }
          };

    public final com.google.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(index);
    }
    public final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor().getEnumTypes().get(0);
    }

    private static final CompareType[] VALUES = values();

    public static CompareType valueOf(
        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int index;
    private final int value;

    private CompareType(int index, int value) {
      this.index = index;
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hbase.pb.CompareType)
  }

  /**
   * Protobuf enum {@code hbase.pb.TimeUnit}
   */
  public enum TimeUnit
      implements com.google.protobuf.ProtocolMessageEnum {
    /**
     * <code>NANOSECONDS = 1;</code>
     */
    NANOSECONDS(0, 1),
    /**
     * <code>MICROSECONDS = 2;</code>
     */
    MICROSECONDS(1, 2),
    /**
     * <code>MILLISECONDS = 3;</code>
     */
    MILLISECONDS(2, 3),
    /**
     * <code>SECONDS = 4;</code>
     */
    SECONDS(3, 4),
    /**
     * <code>MINUTES = 5;</code>
     */
    MINUTES(4, 5),
    /**
     * <code>HOURS = 6;</code>
     */
    HOURS(5, 6),
    /**
     * <code>DAYS = 7;</code>
     */
    DAYS(6, 7),
    ;

    /**
     * <code>NANOSECONDS = 1;</code>
     */
    public static final int NANOSECONDS_VALUE = 1;
    /**
     * <code>MICROSECONDS = 2;</code>
     */
    public static final int MICROSECONDS_VALUE = 2;
    /**
     * <code>MILLISECONDS = 3;</code>
     */
    public static final int MILLISECONDS_VALUE = 3;
    /**
     * <code>SECONDS = 4;</code>
     */
    public static final int SECONDS_VALUE = 4;
    /**
     * <code>MINUTES = 5;</code>
     */
    public static final int MINUTES_VALUE = 5;
    /**
     * <code>HOURS = 6;</code>
     */
    public static final int HOURS_VALUE = 6;
    /**
     * <code>DAYS = 7;</code>
     */
    public static final int DAYS_VALUE = 7;


    public final int getNumber() { return value; }

    public static TimeUnit valueOf(int value) {
      switch (value) {
        case 1: return NANOSECONDS;
        case 2: return MICROSECONDS;
        case 3: return MILLISECONDS;
        case 4: return SECONDS;
        case 5: return MINUTES;
        case 6: return HOURS;
        case 7: return DAYS;
        default: return null;
      }
    }

    public static com.google.protobuf.Internal.EnumLiteMap<TimeUnit>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static com.google.protobuf.Internal.EnumLiteMap<TimeUnit>
        internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<TimeUnit>() {
            public TimeUnit findValueByNumber(int number) {
              return TimeUnit.valueOf(number);
            }
          };

    public final com.google.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(index);
    }
    public final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor().getEnumTypes().get(1);
    }

    private static final TimeUnit[] VALUES = values();

    public static TimeUnit valueOf(
        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int index;
    private final int value;

    private TimeUnit(int index, int value) {
      this.index = index;
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hbase.pb.TimeUnit)
  }

  public interface TableNameOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required bytes namespace = 1;
    /**
     * <code>required bytes namespace = 1;</code>
     */
    boolean hasNamespace();
    /**
     * <code>required bytes namespace = 1;</code>
     */
    com.google.protobuf.ByteString getNamespace();

    // required bytes qualifier = 2;
    /**
     * <code>required bytes qualifier = 2;</code>
     */
    boolean hasQualifier();
    /**
     * <code>required bytes qualifier = 2;</code>
     */
    com.google.protobuf.ByteString getQualifier();
  }
  /**
   * Protobuf type {@code hbase.pb.TableName}
   *
   * <pre>
   **
   * Table Name
   * </pre>
   */
  public static final class TableName extends
      com.google.protobuf.GeneratedMessage
      implements TableNameOrBuilder {
    // Use TableName.newBuilder() to construct.
private TableName(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private TableName(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final TableName defaultInstance; public static TableName getDefaultInstance() { return defaultInstance; } public TableName getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private TableName( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; namespace_ = input.readBytes(); break; } case 18: { bitField0_ |= 0x00000002; qualifier_ = input.readBytes(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableName_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableName_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder.class); } public static com.google.protobuf.Parser<TableName> PARSER = new com.google.protobuf.AbstractParser<TableName>() { public TableName parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new TableName(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<TableName> getParserForType() { return PARSER; } private int bitField0_; // required bytes namespace = 1; public static final int NAMESPACE_FIELD_NUMBER = 1; private com.google.protobuf.ByteString namespace_; /** * <code>required bytes namespace = 1;</code> */ public boolean hasNamespace() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes namespace = 1;</code> */ public com.google.protobuf.ByteString getNamespace() { return namespace_; } // required bytes qualifier = 2; public static final int QUALIFIER_FIELD_NUMBER = 2; private com.google.protobuf.ByteString qualifier_; /** * <code>required bytes qualifier = 2;</code> */ public boolean hasQualifier() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required bytes qualifier = 2;</code> */ public com.google.protobuf.ByteString getQualifier() { 
return qualifier_; } private void initFields() { namespace_ = com.google.protobuf.ByteString.EMPTY; qualifier_ = com.google.protobuf.ByteString.EMPTY; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasNamespace()) { memoizedIsInitialized = 0; return false; } if (!hasQualifier()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, namespace_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, qualifier_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, namespace_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, qualifier_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName) obj; boolean result = true; result = result && (hasNamespace() == other.hasNamespace()); if (hasNamespace()) { result = result && getNamespace() .equals(other.getNamespace()); } result = result && (hasQualifier() == other.hasQualifier()); if (hasQualifier()) { result = result && getQualifier() .equals(other.getQualifier()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasNamespace()) { hash = (37 * hash) + NAMESPACE_FIELD_NUMBER; hash = (53 * hash) + getNamespace().hashCode(); } if (hasQualifier()) { hash = (37 * hash) + QUALIFIER_FIELD_NUMBER; hash = (53 * hash) + getQualifier().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.TableName} * * <pre> ** * Table Name * </pre> */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableName_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableName_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); namespace_ = 
com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); qualifier_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableName_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.namespace_ = namespace_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.qualifier_ = qualifier_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName other) { if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) return this; if (other.hasNamespace()) { setNamespace(other.getNamespace()); } if (other.hasQualifier()) { setQualifier(other.getQualifier()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasNamespace()) { return false; } if (!hasQualifier()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required bytes namespace = 1; private com.google.protobuf.ByteString namespace_ = com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes namespace = 1;</code> */ public boolean hasNamespace() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes namespace = 1;</code> */ public com.google.protobuf.ByteString getNamespace() { return namespace_; } /** * <code>required bytes namespace = 1;</code> */ public Builder setNamespace(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; namespace_ = 
value; onChanged(); return this; } /** * <code>required bytes namespace = 1;</code> */ public Builder clearNamespace() { bitField0_ = (bitField0_ & ~0x00000001); namespace_ = getDefaultInstance().getNamespace(); onChanged(); return this; } // required bytes qualifier = 2; private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes qualifier = 2;</code> */ public boolean hasQualifier() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required bytes qualifier = 2;</code> */ public com.google.protobuf.ByteString getQualifier() { return qualifier_; } /** * <code>required bytes qualifier = 2;</code> */ public Builder setQualifier(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; qualifier_ = value; onChanged(); return this; } /** * <code>required bytes qualifier = 2;</code> */ public Builder clearQualifier() { bitField0_ = (bitField0_ & ~0x00000002); qualifier_ = getDefaultInstance().getQualifier(); onChanged(); return this; } // @@protoc_insertion_point(builder_scope:hbase.pb.TableName) } static { defaultInstance = new TableName(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.TableName) } public interface TableSchemaOrBuilder extends com.google.protobuf.MessageOrBuilder { // optional .hbase.pb.TableName table_name = 1; /** * <code>optional .hbase.pb.TableName table_name = 1;</code> */ boolean hasTableName(); /** * <code>optional .hbase.pb.TableName table_name = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName(); /** * <code>optional .hbase.pb.TableName table_name = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); // repeated .hbase.pb.BytesBytesPair attributes = 2; /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> getAttributesList(); /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getAttributes(int index); /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ int getAttributesCount(); /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> getAttributesOrBuilderList(); /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getAttributesOrBuilder( int index); // repeated .hbase.pb.ColumnFamilySchema column_families = 3; /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema> getColumnFamiliesList(); /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies(int index); /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ int getColumnFamiliesCount(); /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> getColumnFamiliesOrBuilderList(); /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder( int index); // repeated .hbase.pb.NameStringPair configuration = 4; /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList(); /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index); /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ int getConfigurationCount(); /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationOrBuilderList(); /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder( int index); } /** * Protobuf type {@code hbase.pb.TableSchema} * * <pre> ** * Table Schema * Inspired by the rest TableSchema * </pre> */ public static final class TableSchema extends com.google.protobuf.GeneratedMessage implements TableSchemaOrBuilder { // Use TableSchema.newBuilder() to construct. private TableSchema(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private TableSchema(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final TableSchema defaultInstance; public static TableSchema getDefaultInstance() { return defaultInstance; } public TableSchema getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private TableSchema( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = tableName_.toBuilder(); } tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(tableName_); tableName_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 18: { if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { attributes_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair>(); mutable_bitField0_ |= 0x00000002; } attributes_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.PARSER, extensionRegistry)); 
break; } case 26: { if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { columnFamilies_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema>(); mutable_bitField0_ |= 0x00000004; } columnFamilies_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.PARSER, extensionRegistry)); break; } case 34: { if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) { configuration_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair>(); mutable_bitField0_ |= 0x00000008; } configuration_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.PARSER, extensionRegistry)); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { attributes_ = java.util.Collections.unmodifiableList(attributes_); } if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { columnFamilies_ = java.util.Collections.unmodifiableList(columnFamilies_); } if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) { configuration_ = java.util.Collections.unmodifiableList(configuration_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableSchema_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableSchema_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder.class); } public static com.google.protobuf.Parser<TableSchema> PARSER = new com.google.protobuf.AbstractParser<TableSchema>() { public TableSchema parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new TableSchema(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<TableSchema> getParserForType() { return PARSER; } private int bitField0_; // optional .hbase.pb.TableName table_name = 1; public static final int TABLE_NAME_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_; /** * <code>optional .hbase.pb.TableName table_name = 1;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional .hbase.pb.TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() { return tableName_; } /** * <code>optional .hbase.pb.TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { return tableName_; } // repeated .hbase.pb.BytesBytesPair attributes = 2; public static final int ATTRIBUTES_FIELD_NUMBER = 2; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> attributes_; /** * <code>repeated .hbase.pb.BytesBytesPair 
attributes = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> getAttributesList() { return attributes_; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> getAttributesOrBuilderList() { return attributes_; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public int getAttributesCount() { return attributes_.size(); } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getAttributes(int index) { return attributes_.get(index); } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getAttributesOrBuilder( int index) { return attributes_.get(index); } // repeated .hbase.pb.ColumnFamilySchema column_families = 3; public static final int COLUMN_FAMILIES_FIELD_NUMBER = 3; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema> columnFamilies_; /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema> getColumnFamiliesList() { return columnFamilies_; } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> getColumnFamiliesOrBuilderList() { return columnFamilies_; } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public int getColumnFamiliesCount() { return columnFamilies_.size(); } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies(int index) { return columnFamilies_.get(index); } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder( int index) { return columnFamilies_.get(index); } // repeated .hbase.pb.NameStringPair configuration = 4; public static final int CONFIGURATION_FIELD_NUMBER = 4; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> configuration_; /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList() { return configuration_; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationOrBuilderList() { return configuration_; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public int getConfigurationCount() { return configuration_.size(); } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index) { return configuration_.get(index); } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder( int index) { return configuration_.get(index); } private void initFields() { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); attributes_ = java.util.Collections.emptyList(); columnFamilies_ = java.util.Collections.emptyList(); configuration_ = java.util.Collections.emptyList(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (hasTableName()) { if (!getTableName().isInitialized()) { memoizedIsInitialized = 0; return false; } } for (int i = 0; i < getAttributesCount(); i++) { if (!getAttributes(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } for (int i = 0; i < getColumnFamiliesCount(); i++) { if (!getColumnFamilies(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } for (int i = 0; i < getConfigurationCount(); i++) { if (!getConfiguration(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, tableName_); } for (int i = 0; i < attributes_.size(); i++) { output.writeMessage(2, attributes_.get(i)); } for (int i = 0; i < columnFamilies_.size(); i++) { output.writeMessage(3, columnFamilies_.get(i)); } for (int i = 0; i < configuration_.size(); i++) { output.writeMessage(4, configuration_.get(i)); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, tableName_); } for (int i = 0; i < attributes_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, attributes_.get(i)); } for (int i = 0; i < columnFamilies_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(3, columnFamilies_.get(i)); } for (int i = 0; i < configuration_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(4, configuration_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema)) { return super.equals(obj); } 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema) obj; boolean result = true; result = result && (hasTableName() == other.hasTableName()); if (hasTableName()) { result = result && getTableName() .equals(other.getTableName()); } result = result && getAttributesList() .equals(other.getAttributesList()); result = result && getColumnFamiliesList() .equals(other.getColumnFamiliesList()); result = result && getConfigurationList() .equals(other.getConfigurationList()); result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasTableName()) { hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER; hash = (53 * hash) + getTableName().hashCode(); } if (getAttributesCount() > 0) { hash = (37 * hash) + ATTRIBUTES_FIELD_NUMBER; hash = (53 * hash) + getAttributesList().hashCode(); } if (getColumnFamiliesCount() > 0) { hash = (37 * hash) + COLUMN_FAMILIES_FIELD_NUMBER; hash = (53 * hash) + getColumnFamiliesList().hashCode(); } if (getConfigurationCount() > 0) { hash = (37 * hash) + CONFIGURATION_FIELD_NUMBER; hash = (53 * hash) + getConfigurationList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom( com.google.protobuf.CodedInputStream input) throws 
java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.TableSchema} * * <pre> ** * Table Schema * Inspired by the rest TableSchema * </pre> */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableSchema_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableSchema_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getTableNameFieldBuilder(); getAttributesFieldBuilder(); getColumnFamiliesFieldBuilder(); getConfigurationFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (tableNameBuilder_ == null) { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); } else { tableNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (attributesBuilder_ == null) { attributes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); } else { attributesBuilder_.clear(); } if (columnFamiliesBuilder_ == null) { columnFamilies_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); } else { columnFamiliesBuilder_.clear(); } if (configurationBuilder_ == null) { configuration_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000008); } else { configurationBuilder_.clear(); } return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableSchema_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getDefaultInstanceForType() { return 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (tableNameBuilder_ == null) { result.tableName_ = tableName_; } else { result.tableName_ = tableNameBuilder_.build(); } if (attributesBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002)) { attributes_ = java.util.Collections.unmodifiableList(attributes_); bitField0_ = (bitField0_ & ~0x00000002); } result.attributes_ = attributes_; } else { result.attributes_ = attributesBuilder_.build(); } if (columnFamiliesBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004)) { columnFamilies_ = java.util.Collections.unmodifiableList(columnFamilies_); bitField0_ = (bitField0_ & ~0x00000004); } result.columnFamilies_ = columnFamilies_; } else { result.columnFamilies_ = columnFamiliesBuilder_.build(); } if (configurationBuilder_ == null) { if (((bitField0_ & 0x00000008) == 0x00000008)) { configuration_ = java.util.Collections.unmodifiableList(configuration_); bitField0_ = (bitField0_ & ~0x00000008); } result.configuration_ = configuration_; } else { result.configuration_ = configurationBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema other) { if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()) return this; if (other.hasTableName()) { mergeTableName(other.getTableName()); } if (attributesBuilder_ == null) { if (!other.attributes_.isEmpty()) { if (attributes_.isEmpty()) { attributes_ = other.attributes_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureAttributesIsMutable(); attributes_.addAll(other.attributes_); } onChanged(); } } else { if (!other.attributes_.isEmpty()) { if (attributesBuilder_.isEmpty()) { attributesBuilder_.dispose(); attributesBuilder_ = null; attributes_ = other.attributes_; bitField0_ = (bitField0_ & ~0x00000002); attributesBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
getAttributesFieldBuilder() : null; } else { attributesBuilder_.addAllMessages(other.attributes_); } } } if (columnFamiliesBuilder_ == null) { if (!other.columnFamilies_.isEmpty()) { if (columnFamilies_.isEmpty()) { columnFamilies_ = other.columnFamilies_; bitField0_ = (bitField0_ & ~0x00000004); } else { ensureColumnFamiliesIsMutable(); columnFamilies_.addAll(other.columnFamilies_); } onChanged(); } } else { if (!other.columnFamilies_.isEmpty()) { if (columnFamiliesBuilder_.isEmpty()) { columnFamiliesBuilder_.dispose(); columnFamiliesBuilder_ = null; columnFamilies_ = other.columnFamilies_; bitField0_ = (bitField0_ & ~0x00000004); columnFamiliesBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? getColumnFamiliesFieldBuilder() : null; } else { columnFamiliesBuilder_.addAllMessages(other.columnFamilies_); } } } if (configurationBuilder_ == null) { if (!other.configuration_.isEmpty()) { if (configuration_.isEmpty()) { configuration_ = other.configuration_; bitField0_ = (bitField0_ & ~0x00000008); } else { ensureConfigurationIsMutable(); configuration_.addAll(other.configuration_); } onChanged(); } } else { if (!other.configuration_.isEmpty()) { if (configurationBuilder_.isEmpty()) { configurationBuilder_.dispose(); configurationBuilder_ = null; configuration_ = other.configuration_; bitField0_ = (bitField0_ & ~0x00000008); configurationBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? getConfigurationFieldBuilder() : null; } else { configurationBuilder_.addAllMessages(other.configuration_); } } } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (hasTableName()) { if (!getTableName().isInitialized()) { return false; } } for (int i = 0; i < getAttributesCount(); i++) { if (!getAttributes(i).isInitialized()) { return false; } } for (int i = 0; i < getColumnFamiliesCount(); i++) { if (!getColumnFamilies(i).isInitialized()) { return false; } } for (int i = 0; i < getConfigurationCount(); i++) { if (!getConfiguration(i).isInitialized()) { return false; } } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // optional .hbase.pb.TableName table_name = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * <code>optional .hbase.pb.TableName table_name = 1;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional .hbase.pb.TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() { if (tableNameBuilder_ == 
null) { return tableName_; } else { return tableNameBuilder_.getMessage(); } } /** * <code>optional .hbase.pb.TableName table_name = 1;</code> */ public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (value == null) { throw new NullPointerException(); } tableName_ = value; onChanged(); } else { tableNameBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>optional .hbase.pb.TableName table_name = 1;</code> */ public Builder setTableName( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) { if (tableNameBuilder_ == null) { tableName_ = builderForValue.build(); onChanged(); } else { tableNameBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>optional .hbase.pb.TableName table_name = 1;</code> */ public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); } else { tableName_ = value; } onChanged(); } else { tableNameBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>optional .hbase.pb.TableName table_name = 1;</code> */ public Builder clearTableName() { if (tableNameBuilder_ == null) { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); onChanged(); } else { tableNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>optional .hbase.pb.TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() { bitField0_ |= 0x00000001; onChanged(); return getTableNameFieldBuilder().getBuilder(); } /** * <code>optional .hbase.pb.TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { if (tableNameBuilder_ != null) { return tableNameBuilder_.getMessageOrBuilder(); } else { return tableName_; } } /** * <code>optional .hbase.pb.TableName table_name = 1;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>( tableName_, getParentForChildren(), isClean()); tableName_ = null; } return tableNameBuilder_; } // repeated .hbase.pb.BytesBytesPair attributes = 2; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> attributes_ = java.util.Collections.emptyList(); private void ensureAttributesIsMutable() { if (!((bitField0_ & 0x00000002) == 0x00000002)) { attributes_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair>(attributes_); bitField0_ |= 
0x00000002; } } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> attributesBuilder_; /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> getAttributesList() { if (attributesBuilder_ == null) { return java.util.Collections.unmodifiableList(attributes_); } else { return attributesBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public int getAttributesCount() { if (attributesBuilder_ == null) { return attributes_.size(); } else { return attributesBuilder_.getCount(); } } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getAttributes(int index) { if (attributesBuilder_ == null) { return attributes_.get(index); } else { return attributesBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public Builder setAttributes( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) { if (attributesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAttributesIsMutable(); attributes_.set(index, value); onChanged(); } else { attributesBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public Builder setAttributes( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { if (attributesBuilder_ == null) { ensureAttributesIsMutable(); attributes_.set(index, builderForValue.build()); onChanged(); } else { attributesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public Builder addAttributes(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) { if (attributesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAttributesIsMutable(); attributes_.add(value); onChanged(); } else { attributesBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public Builder addAttributes( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) { if (attributesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAttributesIsMutable(); attributes_.add(index, value); onChanged(); } else { attributesBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public Builder addAttributes( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { if (attributesBuilder_ == null) { ensureAttributesIsMutable(); attributes_.add(builderForValue.build()); onChanged(); } else { attributesBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public Builder addAttributes( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { if (attributesBuilder_ == null) { ensureAttributesIsMutable(); attributes_.add(index, 
builderForValue.build()); onChanged(); } else { attributesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public Builder addAllAttributes( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> values) { if (attributesBuilder_ == null) { ensureAttributesIsMutable(); super.addAll(values, attributes_); onChanged(); } else { attributesBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public Builder clearAttributes() { if (attributesBuilder_ == null) { attributes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { attributesBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public Builder removeAttributes(int index) { if (attributesBuilder_ == null) { ensureAttributesIsMutable(); attributes_.remove(index); onChanged(); } else { attributesBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder getAttributesBuilder( int index) { return getAttributesFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getAttributesOrBuilder( int index) { if (attributesBuilder_ == null) { return attributes_.get(index); } else { return attributesBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> getAttributesOrBuilderList() { if (attributesBuilder_ != null) { return attributesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(attributes_); } } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addAttributesBuilder() { return getAttributesFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance()); } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addAttributesBuilder( int index) { return getAttributesFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance()); } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder> getAttributesBuilderList() { return getAttributesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> getAttributesFieldBuilder() { if (attributesBuilder_ == null) { attributesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>( attributes_, ((bitField0_ & 0x00000002) == 0x00000002), getParentForChildren(), isClean()); attributes_ = null; } return attributesBuilder_; } // repeated .hbase.pb.ColumnFamilySchema column_families = 3; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema> columnFamilies_ = java.util.Collections.emptyList(); private void ensureColumnFamiliesIsMutable() { if (!((bitField0_ & 0x00000004) == 0x00000004)) { columnFamilies_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema>(columnFamilies_); bitField0_ |= 0x00000004; } } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> columnFamiliesBuilder_; /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema> getColumnFamiliesList() { if (columnFamiliesBuilder_ == null) { return java.util.Collections.unmodifiableList(columnFamilies_); } else { return columnFamiliesBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public int getColumnFamiliesCount() { if (columnFamiliesBuilder_ == null) { return columnFamilies_.size(); } else { return columnFamiliesBuilder_.getCount(); } } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies(int index) { if (columnFamiliesBuilder_ == null) { return columnFamilies_.get(index); } else { return columnFamiliesBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public Builder setColumnFamilies( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { if (columnFamiliesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureColumnFamiliesIsMutable(); columnFamilies_.set(index, value); onChanged(); } else { columnFamiliesBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public Builder setColumnFamilies( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder builderForValue) { if (columnFamiliesBuilder_ == null) { ensureColumnFamiliesIsMutable(); columnFamilies_.set(index, builderForValue.build()); onChanged(); } else { columnFamiliesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public Builder addColumnFamilies(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { if (columnFamiliesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureColumnFamiliesIsMutable(); columnFamilies_.add(value); onChanged(); } else { columnFamiliesBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public Builder addColumnFamilies( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { if (columnFamiliesBuilder_ == null) { if 
(value == null) { throw new NullPointerException(); } ensureColumnFamiliesIsMutable(); columnFamilies_.add(index, value); onChanged(); } else { columnFamiliesBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public Builder addColumnFamilies( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder builderForValue) { if (columnFamiliesBuilder_ == null) { ensureColumnFamiliesIsMutable(); columnFamilies_.add(builderForValue.build()); onChanged(); } else { columnFamiliesBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public Builder addColumnFamilies( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder builderForValue) { if (columnFamiliesBuilder_ == null) { ensureColumnFamiliesIsMutable(); columnFamilies_.add(index, builderForValue.build()); onChanged(); } else { columnFamiliesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public Builder addAllColumnFamilies( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema> values) { if (columnFamiliesBuilder_ == null) { ensureColumnFamiliesIsMutable(); super.addAll(values, columnFamilies_); onChanged(); } else { columnFamiliesBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public Builder clearColumnFamilies() { if (columnFamiliesBuilder_ == null) { columnFamilies_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); } else { columnFamiliesBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public Builder removeColumnFamilies(int index) { if (columnFamiliesBuilder_ == null) { ensureColumnFamiliesIsMutable(); columnFamilies_.remove(index); onChanged(); } else { columnFamiliesBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder getColumnFamiliesBuilder( int index) { return getColumnFamiliesFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder( int index) { if (columnFamiliesBuilder_ == null) { return columnFamilies_.get(index); } else { return columnFamiliesBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> getColumnFamiliesOrBuilderList() { if (columnFamiliesBuilder_ != null) { return columnFamiliesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(columnFamilies_); } } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder addColumnFamiliesBuilder() { return getColumnFamiliesFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance()); } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder addColumnFamiliesBuilder( int index) { return getColumnFamiliesFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance()); } /** * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder> getColumnFamiliesBuilderList() { return getColumnFamiliesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> getColumnFamiliesFieldBuilder() { if (columnFamiliesBuilder_ == null) { columnFamiliesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder>( columnFamilies_, ((bitField0_ & 0x00000004) == 0x00000004), getParentForChildren(), isClean()); columnFamilies_ = null; } return columnFamiliesBuilder_; } // repeated .hbase.pb.NameStringPair configuration = 4; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> configuration_ = java.util.Collections.emptyList(); private void ensureConfigurationIsMutable() { if (!((bitField0_ & 0x00000008) == 0x00000008)) { configuration_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair>(configuration_); bitField0_ |= 0x00000008; } } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> configurationBuilder_; /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList() { if (configurationBuilder_ == null) { return java.util.Collections.unmodifiableList(configuration_); } else { return configurationBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public int getConfigurationCount() { if (configurationBuilder_ == null) { return configuration_.size(); } else { return configurationBuilder_.getCount(); } } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index) { if (configurationBuilder_ == null) { return configuration_.get(index); } else { return configurationBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public Builder setConfiguration( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { if (configurationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureConfigurationIsMutable(); configuration_.set(index, value); onChanged(); } else { configurationBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public Builder setConfiguration( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); configuration_.set(index, builderForValue.build()); onChanged(); } else { configurationBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public Builder addConfiguration(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { if (configurationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureConfigurationIsMutable(); configuration_.add(value); onChanged(); } else { configurationBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public Builder addConfiguration( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { if (configurationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureConfigurationIsMutable(); configuration_.add(index, value); onChanged(); } else { configurationBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public Builder addConfiguration( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); configuration_.add(builderForValue.build()); onChanged(); } else { configurationBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public Builder addConfiguration( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); configuration_.add(index, builderForValue.build()); onChanged(); } else { configurationBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public Builder addAllConfiguration( java.lang.Iterable<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> values) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); super.addAll(values, configuration_); onChanged(); } else { configurationBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public Builder clearConfiguration() { if (configurationBuilder_ == null) { configuration_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); } else { configurationBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public Builder removeConfiguration(int index) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); configuration_.remove(index); onChanged(); } else { configurationBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder getConfigurationBuilder( int index) { return getConfigurationFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder( int index) { if (configurationBuilder_ == null) { return configuration_.get(index); } else { return configurationBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationOrBuilderList() { if (configurationBuilder_ != null) { return configurationBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(configuration_); } } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addConfigurationBuilder() { return getConfigurationFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()); } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addConfigurationBuilder( int index) { return getConfigurationFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()); } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder> getConfigurationBuilderList() { return getConfigurationFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationFieldBuilder() { if (configurationBuilder_ == null) { configurationBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>( configuration_, ((bitField0_ & 0x00000008) == 0x00000008), 
getParentForChildren(), isClean()); configuration_ = null; } return configurationBuilder_; } // @@protoc_insertion_point(builder_scope:hbase.pb.TableSchema) } static { defaultInstance = new TableSchema(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.TableSchema) } public interface TableStateOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .hbase.pb.TableState.State state = 1; /** * <code>required .hbase.pb.TableState.State state = 1;</code> * * <pre> * This is the table's state. * </pre> */ boolean hasState(); /** * <code>required .hbase.pb.TableState.State state = 1;</code> * * <pre> * This is the table's state. * </pre> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState.State getState(); } /** * Protobuf type {@code hbase.pb.TableState} * * <pre> ** Denotes state of the table * </pre> */ public static final class TableState extends com.google.protobuf.GeneratedMessage implements TableStateOrBuilder { // Use TableState.newBuilder() to construct. private TableState(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private TableState(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final TableState defaultInstance; public static TableState getDefaultInstance() { return defaultInstance; } public TableState getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private TableState( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { int rawValue = input.readEnum(); org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState.State value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState.State.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(1, rawValue); } else { bitField0_ |= 0x00000001; state_ = value; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableState_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableState_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState.Builder.class); } public static 
com.google.protobuf.Parser<TableState> PARSER = new com.google.protobuf.AbstractParser<TableState>() { public TableState parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new TableState(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<TableState> getParserForType() { return PARSER; } /** * Protobuf enum {@code hbase.pb.TableState.State} * * <pre> * Table's current state * </pre> */ public enum State implements com.google.protobuf.ProtocolMessageEnum { /** * <code>ENABLED = 0;</code> */ ENABLED(0, 0), /** * <code>DISABLED = 1;</code> */ DISABLED(1, 1), /** * <code>DISABLING = 2;</code> */ DISABLING(2, 2), /** * <code>ENABLING = 3;</code> */ ENABLING(3, 3), ; /** * <code>ENABLED = 0;</code> */ public static final int ENABLED_VALUE = 0; /** * <code>DISABLED = 1;</code> */ public static final int DISABLED_VALUE = 1; /** * <code>DISABLING = 2;</code> */ public static final int DISABLING_VALUE = 2; /** * <code>ENABLING = 3;</code> */ public static final int ENABLING_VALUE = 3; public final int getNumber() { return value; } public static State valueOf(int value) { switch (value) { case 0: return ENABLED; case 1: return DISABLED; case 2: return DISABLING; case 3: return ENABLING; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<State> internalGetValueMap() { return internalValueMap; } private static com.google.protobuf.Internal.EnumLiteMap<State> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<State>() { public State findValueByNumber(int number) { return State.valueOf(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(index); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState.getDescriptor().getEnumTypes().get(0); } private static final State[] VALUES = values(); public static State valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } return VALUES[desc.getIndex()]; } private final int index; private final int value; private State(int index, int value) { this.index = index; this.value = value; } // @@protoc_insertion_point(enum_scope:hbase.pb.TableState.State) } private int bitField0_; // required .hbase.pb.TableState.State state = 1; public static final int STATE_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState.State state_; /** * <code>required .hbase.pb.TableState.State state = 1;</code> * * <pre> * This is the table's state. * </pre> */ public boolean hasState() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.TableState.State state = 1;</code> * * <pre> * This is the table's state. 
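* (A hedged editor's sketch, not generated javadoc: reading this field from a parsed
* message, where serializedBytes is assumed to hold a serialized TableState:
*   TableState.State s = TableState.parseFrom(serializedBytes).getState();
* s will be one of ENABLED, DISABLED, DISABLING or ENABLING, and parseFrom may throw
* InvalidProtocolBufferException if the bytes are not a valid TableState.)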
* </pre> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState.State getState() { return state_; } private void initFields() { state_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState.State.ENABLED; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasState()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeEnum(1, state_.getNumber()); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(1, state_.getNumber()); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState) obj; boolean result = true; result = result && (hasState() == other.hasState()); if (hasState()) { result = result && (getState() == other.getState()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasState()) { hash = (37 * hash) + STATE_FIELD_NUMBER; hash = (53 * hash) + hashEnum(getState()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.TableState} * * <pre> ** Denotes state of the table * </pre> */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableStateOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableState_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableState_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); state_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState.State.ENABLED; bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableState_descriptor; } public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.state_ = state_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState other) { if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState.getDefaultInstance()) return this; if (other.hasState()) { setState(other.getState()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasState()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required .hbase.pb.TableState.State state = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState.State state_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState.State.ENABLED; /** * <code>required .hbase.pb.TableState.State state = 1;</code> * * <pre> * This is the table's state. * </pre> */ public boolean hasState() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.TableState.State state = 1;</code> * * <pre> * This is the table's state. * </pre> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState.State getState() { return state_; } /** * <code>required .hbase.pb.TableState.State state = 1;</code> * * <pre> * This is the table's state. * </pre> */ public Builder setState(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState.State value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; state_ = value; onChanged(); return this; } /** * <code>required .hbase.pb.TableState.State state = 1;</code> * * <pre> * This is the table's state. 
* </pre> */ public Builder clearState() { bitField0_ = (bitField0_ & ~0x00000001); state_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableState.State.ENABLED; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:hbase.pb.TableState) } static { defaultInstance = new TableState(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.TableState) } public interface ColumnFamilySchemaOrBuilder extends com.google.protobuf.MessageOrBuilder { // required bytes name = 1; /** * <code>required bytes name = 1;</code> */ boolean hasName(); /** * <code>required bytes name = 1;</code> */ com.google.protobuf.ByteString getName(); // repeated .hbase.pb.BytesBytesPair attributes = 2; /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> getAttributesList(); /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getAttributes(int index); /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ int getAttributesCount(); /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> getAttributesOrBuilderList(); /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getAttributesOrBuilder( int index); // repeated .hbase.pb.NameStringPair configuration = 3; /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList(); /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index); /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ int getConfigurationCount(); /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationOrBuilderList(); /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder( int index); } /** * Protobuf type {@code hbase.pb.ColumnFamilySchema} * * <pre> ** * Column Family Schema * Inspired by the rest ColumSchemaMessage * </pre> */ public static final class ColumnFamilySchema extends com.google.protobuf.GeneratedMessage implements ColumnFamilySchemaOrBuilder { // Use ColumnFamilySchema.newBuilder() to construct. 
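// A hedged usage sketch (editor's note, not generated code); the family name "cf" is an
// illustrative assumption:
//
//   ColumnFamilySchema cfs = ColumnFamilySchema.newBuilder()
//       .setName(com.google.protobuf.ByteString.copyFromUtf8("cf"))
//       .build();
//
// Repeated fields are populated through addAttributes(...) / addConfiguration(...) on the
// Builder before build(); only name is required for isInitialized() to pass when the
// repeated fields are left empty.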
private ColumnFamilySchema(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private ColumnFamilySchema(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final ColumnFamilySchema defaultInstance; public static ColumnFamilySchema getDefaultInstance() { return defaultInstance; } public ColumnFamilySchema getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ColumnFamilySchema( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; name_ = input.readBytes(); break; } case 18: { if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { attributes_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair>(); mutable_bitField0_ |= 0x00000002; } attributes_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.PARSER, extensionRegistry)); break; } case 26: { if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { configuration_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair>(); mutable_bitField0_ |= 0x00000004; } configuration_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.PARSER, extensionRegistry)); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { attributes_ = java.util.Collections.unmodifiableList(attributes_); } if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { configuration_ = java.util.Collections.unmodifiableList(configuration_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ColumnFamilySchema_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ColumnFamilySchema_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder.class); } public static com.google.protobuf.Parser<ColumnFamilySchema> PARSER = new com.google.protobuf.AbstractParser<ColumnFamilySchema>() { public ColumnFamilySchema parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 
throws com.google.protobuf.InvalidProtocolBufferException { return new ColumnFamilySchema(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<ColumnFamilySchema> getParserForType() { return PARSER; } private int bitField0_; // required bytes name = 1; public static final int NAME_FIELD_NUMBER = 1; private com.google.protobuf.ByteString name_; /** * <code>required bytes name = 1;</code> */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes name = 1;</code> */ public com.google.protobuf.ByteString getName() { return name_; } // repeated .hbase.pb.BytesBytesPair attributes = 2; public static final int ATTRIBUTES_FIELD_NUMBER = 2; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> attributes_; /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> getAttributesList() { return attributes_; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> getAttributesOrBuilderList() { return attributes_; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public int getAttributesCount() { return attributes_.size(); } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getAttributes(int index) { return attributes_.get(index); } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getAttributesOrBuilder( int index) { return attributes_.get(index); } // repeated .hbase.pb.NameStringPair configuration = 3; public static final int CONFIGURATION_FIELD_NUMBER = 3; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> configuration_; /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList() { return configuration_; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationOrBuilderList() { return configuration_; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public int getConfigurationCount() { return configuration_.size(); } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index) { return configuration_.get(index); } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder( int index) { return configuration_.get(index); } private void initFields() { name_ = com.google.protobuf.ByteString.EMPTY; attributes_ = java.util.Collections.emptyList(); configuration_ = java.util.Collections.emptyList(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasName()) { memoizedIsInitialized = 0; return false; } for (int i = 0; i < getAttributesCount(); i++) { if (!getAttributes(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } for (int i = 0; i < getConfigurationCount(); i++) { if (!getConfiguration(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, name_); } for (int i = 0; i < attributes_.size(); i++) { output.writeMessage(2, attributes_.get(i)); } for (int i = 0; i < configuration_.size(); i++) { output.writeMessage(3, configuration_.get(i)); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, name_); } for (int i = 0; i < attributes_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, attributes_.get(i)); } for (int i = 0; i < configuration_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(3, configuration_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema) obj; boolean result = true; result = result && (hasName() == other.hasName()); if (hasName()) { result = result && getName() .equals(other.getName()); } result = result && getAttributesList() .equals(other.getAttributesList()); result = result && getConfigurationList() .equals(other.getConfigurationList()); result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode 
= 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasName()) { hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); } if (getAttributesCount() > 0) { hash = (37 * hash) + ATTRIBUTES_FIELD_NUMBER; hash = (53 * hash) + getAttributesList().hashCode(); } if (getConfigurationCount() > 0) { hash = (37 * hash) + CONFIGURATION_FIELD_NUMBER; hash = (53 * hash) + getConfigurationList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.ColumnFamilySchema} * * <pre> ** * Column Family Schema * Inspired by the rest ColumSchemaMessage * </pre> */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ColumnFamilySchema_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ColumnFamilySchema_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getAttributesFieldBuilder(); getConfigurationFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); name_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); if (attributesBuilder_ == null) { attributes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); } else { attributesBuilder_.clear(); } if (configurationBuilder_ == null) { configuration_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); } else { configurationBuilder_.clear(); } return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ColumnFamilySchema_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.name_ = name_; if (attributesBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002)) { attributes_ = java.util.Collections.unmodifiableList(attributes_); bitField0_ = (bitField0_ & ~0x00000002); } result.attributes_ = attributes_; } else { result.attributes_ = 
attributesBuilder_.build(); } if (configurationBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004)) { configuration_ = java.util.Collections.unmodifiableList(configuration_); bitField0_ = (bitField0_ & ~0x00000004); } result.configuration_ = configuration_; } else { result.configuration_ = configurationBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema other) { if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance()) return this; if (other.hasName()) { setName(other.getName()); } if (attributesBuilder_ == null) { if (!other.attributes_.isEmpty()) { if (attributes_.isEmpty()) { attributes_ = other.attributes_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureAttributesIsMutable(); attributes_.addAll(other.attributes_); } onChanged(); } } else { if (!other.attributes_.isEmpty()) { if (attributesBuilder_.isEmpty()) { attributesBuilder_.dispose(); attributesBuilder_ = null; attributes_ = other.attributes_; bitField0_ = (bitField0_ & ~0x00000002); attributesBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? getAttributesFieldBuilder() : null; } else { attributesBuilder_.addAllMessages(other.attributes_); } } } if (configurationBuilder_ == null) { if (!other.configuration_.isEmpty()) { if (configuration_.isEmpty()) { configuration_ = other.configuration_; bitField0_ = (bitField0_ & ~0x00000004); } else { ensureConfigurationIsMutable(); configuration_.addAll(other.configuration_); } onChanged(); } } else { if (!other.configuration_.isEmpty()) { if (configurationBuilder_.isEmpty()) { configurationBuilder_.dispose(); configurationBuilder_ = null; configuration_ = other.configuration_; bitField0_ = (bitField0_ & ~0x00000004); configurationBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
getConfigurationFieldBuilder() : null; } else { configurationBuilder_.addAllMessages(other.configuration_); } } } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasName()) { return false; } for (int i = 0; i < getAttributesCount(); i++) { if (!getAttributes(i).isInitialized()) { return false; } } for (int i = 0; i < getConfigurationCount(); i++) { if (!getConfiguration(i).isInitialized()) { return false; } } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required bytes name = 1; private com.google.protobuf.ByteString name_ = com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes name = 1;</code> */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes name = 1;</code> */ public com.google.protobuf.ByteString getName() { return name_; } /** * <code>required bytes name = 1;</code> */ public Builder setName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; name_ = value; onChanged(); return this; } /** * <code>required bytes name = 1;</code> */ public Builder clearName() { bitField0_ = (bitField0_ & ~0x00000001); name_ = getDefaultInstance().getName(); onChanged(); return this; } // repeated .hbase.pb.BytesBytesPair attributes = 2; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> attributes_ = java.util.Collections.emptyList(); private void ensureAttributesIsMutable() { if (!((bitField0_ & 0x00000002) == 0x00000002)) { attributes_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair>(attributes_); bitField0_ |= 0x00000002; } } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> attributesBuilder_; /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> getAttributesList() { if (attributesBuilder_ == null) { return java.util.Collections.unmodifiableList(attributes_); } else { return attributesBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public int getAttributesCount() { if (attributesBuilder_ == null) { return attributes_.size(); } else { return attributesBuilder_.getCount(); } } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getAttributes(int index) { if (attributesBuilder_ == null) { return attributes_.get(index); } else { return attributesBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 
2;</code> */ public Builder setAttributes( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) { if (attributesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAttributesIsMutable(); attributes_.set(index, value); onChanged(); } else { attributesBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public Builder setAttributes( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { if (attributesBuilder_ == null) { ensureAttributesIsMutable(); attributes_.set(index, builderForValue.build()); onChanged(); } else { attributesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public Builder addAttributes(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) { if (attributesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAttributesIsMutable(); attributes_.add(value); onChanged(); } else { attributesBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public Builder addAttributes( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) { if (attributesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAttributesIsMutable(); attributes_.add(index, value); onChanged(); } else { attributesBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public Builder addAttributes( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { if (attributesBuilder_ == null) { ensureAttributesIsMutable(); attributes_.add(builderForValue.build()); onChanged(); } else { attributesBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public Builder addAttributes( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { if (attributesBuilder_ == null) { ensureAttributesIsMutable(); attributes_.add(index, builderForValue.build()); onChanged(); } else { attributesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public Builder addAllAttributes( java.lang.Iterable<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> values) { if (attributesBuilder_ == null) { ensureAttributesIsMutable(); super.addAll(values, attributes_); onChanged(); } else { attributesBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public Builder clearAttributes() { if (attributesBuilder_ == null) { attributes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { attributesBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public Builder removeAttributes(int index) { if (attributesBuilder_ == null) { ensureAttributesIsMutable(); attributes_.remove(index); onChanged(); } else { attributesBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder getAttributesBuilder( int index) { return getAttributesFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getAttributesOrBuilder( int index) { if (attributesBuilder_ == null) { return attributes_.get(index); } else { return attributesBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> getAttributesOrBuilderList() { if (attributesBuilder_ != null) { return attributesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(attributes_); } } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addAttributesBuilder() { return getAttributesFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance()); } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addAttributesBuilder( int index) { return getAttributesFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance()); } /** * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder> getAttributesBuilderList() { return getAttributesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> getAttributesFieldBuilder() { if (attributesBuilder_ == null) { attributesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>( attributes_, ((bitField0_ & 0x00000002) == 0x00000002), getParentForChildren(), isClean()); attributes_ = null; } return attributesBuilder_; } // repeated .hbase.pb.NameStringPair configuration 
= 3; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> configuration_ = java.util.Collections.emptyList(); private void ensureConfigurationIsMutable() { if (!((bitField0_ & 0x00000004) == 0x00000004)) { configuration_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair>(configuration_); bitField0_ |= 0x00000004; } } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> configurationBuilder_; /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList() { if (configurationBuilder_ == null) { return java.util.Collections.unmodifiableList(configuration_); } else { return configurationBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public int getConfigurationCount() { if (configurationBuilder_ == null) { return configuration_.size(); } else { return configurationBuilder_.getCount(); } } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index) { if (configurationBuilder_ == null) { return configuration_.get(index); } else { return configurationBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public Builder setConfiguration( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { if (configurationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureConfigurationIsMutable(); configuration_.set(index, value); onChanged(); } else { configurationBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public Builder setConfiguration( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); configuration_.set(index, builderForValue.build()); onChanged(); } else { configurationBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public Builder addConfiguration(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { if (configurationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureConfigurationIsMutable(); configuration_.add(value); onChanged(); } else { configurationBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public Builder addConfiguration( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { if (configurationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureConfigurationIsMutable(); configuration_.add(index, value); onChanged(); } else { configurationBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public Builder addConfiguration( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder 
builderForValue) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); configuration_.add(builderForValue.build()); onChanged(); } else { configurationBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public Builder addConfiguration( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); configuration_.add(index, builderForValue.build()); onChanged(); } else { configurationBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public Builder addAllConfiguration( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> values) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); super.addAll(values, configuration_); onChanged(); } else { configurationBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public Builder clearConfiguration() { if (configurationBuilder_ == null) { configuration_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); } else { configurationBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public Builder removeConfiguration(int index) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); configuration_.remove(index); onChanged(); } else { configurationBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder getConfigurationBuilder( int index) { return getConfigurationFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder( int index) { if (configurationBuilder_ == null) { return configuration_.get(index); } else { return configurationBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationOrBuilderList() { if (configurationBuilder_ != null) { return configurationBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(configuration_); } } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addConfigurationBuilder() { return getConfigurationFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()); } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addConfigurationBuilder( int index) { return getConfigurationFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()); } /** * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder> getConfigurationBuilderList() { return getConfigurationFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationFieldBuilder() { if (configurationBuilder_ == null) { configurationBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>( configuration_, ((bitField0_ & 0x00000004) == 0x00000004), getParentForChildren(), isClean()); configuration_ = null; } return configurationBuilder_; } // @@protoc_insertion_point(builder_scope:hbase.pb.ColumnFamilySchema) } static { defaultInstance = new ColumnFamilySchema(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.ColumnFamilySchema) } public interface RegionInfoOrBuilder extends com.google.protobuf.MessageOrBuilder { // required uint64 region_id = 1; /** * <code>required uint64 region_id = 1;</code> */ boolean hasRegionId(); /** * <code>required uint64 region_id = 1;</code> */ long getRegionId(); // required .hbase.pb.TableName table_name = 2; /** * <code>required .hbase.pb.TableName table_name = 2;</code> */ boolean hasTableName(); /** * <code>required .hbase.pb.TableName table_name = 2;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName(); /** * <code>required .hbase.pb.TableName table_name = 2;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); // optional bytes start_key = 3; /** * <code>optional bytes start_key = 3;</code> */ boolean hasStartKey(); /** * <code>optional bytes start_key = 3;</code> */ com.google.protobuf.ByteString getStartKey(); // optional bytes end_key = 4; /** * <code>optional bytes end_key = 4;</code> */ boolean hasEndKey(); /** * <code>optional bytes end_key = 4;</code> */ com.google.protobuf.ByteString getEndKey(); // optional bool offline = 5; /** * <code>optional bool offline = 5;</code> */ boolean hasOffline(); /** * <code>optional bool offline = 5;</code> */ 
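/*
 * Illustrative usage sketch (editor's note, not emitted by protoc): building and
 * round-tripping the ColumnFamilySchema message defined above. The Builder calls
 * setName(ByteString), addConfiguration(NameStringPair.Builder) and build() appear
 * in the generated code above; the NameStringPair setters (setName/setValue) and the
 * ColumnFamilySchema.parseFrom(byte[]) overload are assumed to follow the usual
 * protobuf codegen pattern used throughout this file, and the configuration key shown
 * is only a placeholder value.
 *
 *   import com.google.protobuf.ByteString;
 *   import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema;
 *   import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair;
 *
 *   ColumnFamilySchema cfs = ColumnFamilySchema.newBuilder()
 *       .setName(ByteString.copyFromUtf8("cf"))          // required bytes name = 1
 *       .addConfiguration(NameStringPair.newBuilder()    // repeated .hbase.pb.NameStringPair configuration = 3
 *           .setName("example.key")
 *           .setValue("example.value"))
 *       .build();                                        // build() fails if a required field is unset
 *
 *   byte[] wire = cfs.toByteArray();                     // toByteArray() comes from the protobuf runtime base class
 *   ColumnFamilySchema parsed = ColumnFamilySchema.parseFrom(wire);
 */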
boolean getOffline(); // optional bool split = 6; /** * <code>optional bool split = 6;</code> */ boolean hasSplit(); /** * <code>optional bool split = 6;</code> */ boolean getSplit(); // optional int32 replica_id = 7 [default = 0]; /** * <code>optional int32 replica_id = 7 [default = 0];</code> */ boolean hasReplicaId(); /** * <code>optional int32 replica_id = 7 [default = 0];</code> */ int getReplicaId(); } /** * Protobuf type {@code hbase.pb.RegionInfo} * * <pre> ** * Protocol buffer version of HRegionInfo. * </pre> */ public static final class RegionInfo extends com.google.protobuf.GeneratedMessage implements RegionInfoOrBuilder { // Use RegionInfo.newBuilder() to construct. private RegionInfo(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private RegionInfo(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final RegionInfo defaultInstance; public static RegionInfo getDefaultInstance() { return defaultInstance; } public RegionInfo getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private RegionInfo( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { bitField0_ |= 0x00000001; regionId_ = input.readUInt64(); break; } case 18: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null; if (((bitField0_ & 0x00000002) == 0x00000002)) { subBuilder = tableName_.toBuilder(); } tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(tableName_); tableName_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } case 26: { bitField0_ |= 0x00000004; startKey_ = input.readBytes(); break; } case 34: { bitField0_ |= 0x00000008; endKey_ = input.readBytes(); break; } case 40: { bitField0_ |= 0x00000010; offline_ = input.readBool(); break; } case 48: { bitField0_ |= 0x00000020; split_ = input.readBool(); break; } case 56: { bitField0_ |= 0x00000040; replicaId_ = input.readInt32(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionInfo_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionInfo_fieldAccessorTable 
.ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder.class); } public static com.google.protobuf.Parser<RegionInfo> PARSER = new com.google.protobuf.AbstractParser<RegionInfo>() { public RegionInfo parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new RegionInfo(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<RegionInfo> getParserForType() { return PARSER; } private int bitField0_; // required uint64 region_id = 1; public static final int REGION_ID_FIELD_NUMBER = 1; private long regionId_; /** * <code>required uint64 region_id = 1;</code> */ public boolean hasRegionId() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required uint64 region_id = 1;</code> */ public long getRegionId() { return regionId_; } // required .hbase.pb.TableName table_name = 2; public static final int TABLE_NAME_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_; /** * <code>required .hbase.pb.TableName table_name = 2;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required .hbase.pb.TableName table_name = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() { return tableName_; } /** * <code>required .hbase.pb.TableName table_name = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { return tableName_; } // optional bytes start_key = 3; public static final int START_KEY_FIELD_NUMBER = 3; private com.google.protobuf.ByteString startKey_; /** * <code>optional bytes start_key = 3;</code> */ public boolean hasStartKey() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional bytes start_key = 3;</code> */ public com.google.protobuf.ByteString getStartKey() { return startKey_; } // optional bytes end_key = 4; public static final int END_KEY_FIELD_NUMBER = 4; private com.google.protobuf.ByteString endKey_; /** * <code>optional bytes end_key = 4;</code> */ public boolean hasEndKey() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional bytes end_key = 4;</code> */ public com.google.protobuf.ByteString getEndKey() { return endKey_; } // optional bool offline = 5; public static final int OFFLINE_FIELD_NUMBER = 5; private boolean offline_; /** * <code>optional bool offline = 5;</code> */ public boolean hasOffline() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>optional bool offline = 5;</code> */ public boolean getOffline() { return offline_; } // optional bool split = 6; public static final int SPLIT_FIELD_NUMBER = 6; private boolean split_; /** * <code>optional bool split = 6;</code> */ public boolean hasSplit() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <code>optional bool split = 6;</code> */ public boolean getSplit() { return split_; } // optional int32 replica_id = 7 [default = 0]; public static final int REPLICA_ID_FIELD_NUMBER = 7; private int replicaId_; /** * <code>optional int32 replica_id = 7 [default = 0];</code> */ public boolean hasReplicaId() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** * <code>optional int32 replica_id = 7 [default = 0];</code> */ public int getReplicaId() { return 
replicaId_; } private void initFields() { regionId_ = 0L; tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); startKey_ = com.google.protobuf.ByteString.EMPTY; endKey_ = com.google.protobuf.ByteString.EMPTY; offline_ = false; split_ = false; replicaId_ = 0; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasRegionId()) { memoizedIsInitialized = 0; return false; } if (!hasTableName()) { memoizedIsInitialized = 0; return false; } if (!getTableName().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt64(1, regionId_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeMessage(2, tableName_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBytes(3, startKey_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeBytes(4, endKey_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeBool(5, offline_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { output.writeBool(6, split_); } if (((bitField0_ & 0x00000040) == 0x00000040)) { output.writeInt32(7, replicaId_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(1, regionId_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, tableName_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(3, startKey_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(4, endKey_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(5, offline_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(6, split_); } if (((bitField0_ & 0x00000040) == 0x00000040)) { size += com.google.protobuf.CodedOutputStream .computeInt32Size(7, replicaId_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo) obj; boolean result = true; result = result && (hasRegionId() == other.hasRegionId()); if (hasRegionId()) { result = result && (getRegionId() == other.getRegionId()); } result = result && (hasTableName() == other.hasTableName()); if (hasTableName()) { result = result && getTableName() .equals(other.getTableName()); } result = result && (hasStartKey() == other.hasStartKey()); 
if (hasStartKey()) { result = result && getStartKey() .equals(other.getStartKey()); } result = result && (hasEndKey() == other.hasEndKey()); if (hasEndKey()) { result = result && getEndKey() .equals(other.getEndKey()); } result = result && (hasOffline() == other.hasOffline()); if (hasOffline()) { result = result && (getOffline() == other.getOffline()); } result = result && (hasSplit() == other.hasSplit()); if (hasSplit()) { result = result && (getSplit() == other.getSplit()); } result = result && (hasReplicaId() == other.hasReplicaId()); if (hasReplicaId()) { result = result && (getReplicaId() == other.getReplicaId()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRegionId()) { hash = (37 * hash) + REGION_ID_FIELD_NUMBER; hash = (53 * hash) + hashLong(getRegionId()); } if (hasTableName()) { hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER; hash = (53 * hash) + getTableName().hashCode(); } if (hasStartKey()) { hash = (37 * hash) + START_KEY_FIELD_NUMBER; hash = (53 * hash) + getStartKey().hashCode(); } if (hasEndKey()) { hash = (37 * hash) + END_KEY_FIELD_NUMBER; hash = (53 * hash) + getEndKey().hashCode(); } if (hasOffline()) { hash = (37 * hash) + OFFLINE_FIELD_NUMBER; hash = (53 * hash) + hashBoolean(getOffline()); } if (hasSplit()) { hash = (37 * hash) + SPLIT_FIELD_NUMBER; hash = (53 * hash) + hashBoolean(getSplit()); } if (hasReplicaId()) { hash = (37 * hash) + REPLICA_ID_FIELD_NUMBER; hash = (53 * hash) + getReplicaId(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseDelimitedFrom( java.io.InputStream 
input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.RegionInfo} * * <pre> ** * Protocol buffer version of HRegionInfo. * </pre> */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionInfo_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getTableNameFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); regionId_ = 0L; bitField0_ = (bitField0_ & ~0x00000001); if (tableNameBuilder_ == null) { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); } else { tableNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); startKey_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000004); endKey_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000008); offline_ = false; bitField0_ = (bitField0_ & ~0x00000010); split_ = false; bitField0_ = (bitField0_ & ~0x00000020); replicaId_ = 0; bitField0_ = (bitField0_ & ~0x00000040); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionInfo_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo 
getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.regionId_ = regionId_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } if (tableNameBuilder_ == null) { result.tableName_ = tableName_; } else { result.tableName_ = tableNameBuilder_.build(); } if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.startKey_ = startKey_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; } result.endKey_ = endKey_; if (((from_bitField0_ & 0x00000010) == 0x00000010)) { to_bitField0_ |= 0x00000010; } result.offline_ = offline_; if (((from_bitField0_ & 0x00000020) == 0x00000020)) { to_bitField0_ |= 0x00000020; } result.split_ = split_; if (((from_bitField0_ & 0x00000040) == 0x00000040)) { to_bitField0_ |= 0x00000040; } result.replicaId_ = replicaId_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo other) { if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()) return this; if (other.hasRegionId()) { setRegionId(other.getRegionId()); } if (other.hasTableName()) { mergeTableName(other.getTableName()); } if (other.hasStartKey()) { setStartKey(other.getStartKey()); } if (other.hasEndKey()) { setEndKey(other.getEndKey()); } if (other.hasOffline()) { setOffline(other.getOffline()); } if (other.hasSplit()) { setSplit(other.getSplit()); } if (other.hasReplicaId()) { setReplicaId(other.getReplicaId()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasRegionId()) { return false; } if (!hasTableName()) { return false; } if (!getTableName().isInitialized()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required uint64 region_id = 1; private long regionId_ ; /** * <code>required uint64 region_id = 
1;</code> */ public boolean hasRegionId() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required uint64 region_id = 1;</code> */ public long getRegionId() { return regionId_; } /** * <code>required uint64 region_id = 1;</code> */ public Builder setRegionId(long value) { bitField0_ |= 0x00000001; regionId_ = value; onChanged(); return this; } /** * <code>required uint64 region_id = 1;</code> */ public Builder clearRegionId() { bitField0_ = (bitField0_ & ~0x00000001); regionId_ = 0L; onChanged(); return this; } // required .hbase.pb.TableName table_name = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * <code>required .hbase.pb.TableName table_name = 2;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required .hbase.pb.TableName table_name = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() { if (tableNameBuilder_ == null) { return tableName_; } else { return tableNameBuilder_.getMessage(); } } /** * <code>required .hbase.pb.TableName table_name = 2;</code> */ public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (value == null) { throw new NullPointerException(); } tableName_ = value; onChanged(); } else { tableNameBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } /** * <code>required .hbase.pb.TableName table_name = 2;</code> */ public Builder setTableName( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) { if (tableNameBuilder_ == null) { tableName_ = builderForValue.build(); onChanged(); } else { tableNameBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } /** * <code>required .hbase.pb.TableName table_name = 2;</code> */ public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); } else { tableName_ = value; } onChanged(); } else { tableNameBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } /** * <code>required .hbase.pb.TableName table_name = 2;</code> */ public Builder clearTableName() { if (tableNameBuilder_ == null) { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); onChanged(); } else { tableNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } /** * <code>required .hbase.pb.TableName table_name = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() { bitField0_ |= 0x00000002; onChanged(); return getTableNameFieldBuilder().getBuilder(); } /** * <code>required .hbase.pb.TableName table_name = 2;</code> */ public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { if (tableNameBuilder_ != null) { return tableNameBuilder_.getMessageOrBuilder(); } else { return tableName_; } } /** * <code>required .hbase.pb.TableName table_name = 2;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>( tableName_, getParentForChildren(), isClean()); tableName_ = null; } return tableNameBuilder_; } // optional bytes start_key = 3; private com.google.protobuf.ByteString startKey_ = com.google.protobuf.ByteString.EMPTY; /** * <code>optional bytes start_key = 3;</code> */ public boolean hasStartKey() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional bytes start_key = 3;</code> */ public com.google.protobuf.ByteString getStartKey() { return startKey_; } /** * <code>optional bytes start_key = 3;</code> */ public Builder setStartKey(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; startKey_ = value; onChanged(); return this; } /** * <code>optional bytes start_key = 3;</code> */ public Builder clearStartKey() { bitField0_ = (bitField0_ & ~0x00000004); startKey_ = getDefaultInstance().getStartKey(); onChanged(); return this; } // optional bytes end_key = 4; private com.google.protobuf.ByteString endKey_ = com.google.protobuf.ByteString.EMPTY; /** * <code>optional bytes end_key = 4;</code> */ public boolean hasEndKey() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional bytes end_key = 4;</code> */ public com.google.protobuf.ByteString getEndKey() { return endKey_; } /** * <code>optional bytes end_key = 4;</code> */ public Builder setEndKey(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000008; endKey_ = value; onChanged(); return this; } /** * <code>optional bytes end_key = 4;</code> */ public Builder clearEndKey() { bitField0_ = (bitField0_ & ~0x00000008); endKey_ = getDefaultInstance().getEndKey(); onChanged(); return this; } // optional bool offline = 5; private boolean offline_ ; /** * <code>optional bool offline = 5;</code> */ public boolean hasOffline() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>optional bool offline = 5;</code> */ public boolean getOffline() { return offline_; } /** * <code>optional bool offline = 5;</code> */ public Builder setOffline(boolean value) { bitField0_ |= 0x00000010; offline_ = value; onChanged(); return this; } /** * <code>optional bool offline = 5;</code> */ public Builder clearOffline() { bitField0_ = (bitField0_ & ~0x00000010); offline_ = false; onChanged(); return this; } // optional bool split = 6; private boolean split_ ; /** * <code>optional bool split = 6;</code> */ public boolean hasSplit() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <code>optional bool split = 6;</code> */ public boolean getSplit() { return split_; } /** * <code>optional bool split = 
6;</code> */ public Builder setSplit(boolean value) { bitField0_ |= 0x00000020; split_ = value; onChanged(); return this; } /** * <code>optional bool split = 6;</code> */ public Builder clearSplit() { bitField0_ = (bitField0_ & ~0x00000020); split_ = false; onChanged(); return this; } // optional int32 replica_id = 7 [default = 0]; private int replicaId_ ; /** * <code>optional int32 replica_id = 7 [default = 0];</code> */ public boolean hasReplicaId() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** * <code>optional int32 replica_id = 7 [default = 0];</code> */ public int getReplicaId() { return replicaId_; } /** * <code>optional int32 replica_id = 7 [default = 0];</code> */ public Builder setReplicaId(int value) { bitField0_ |= 0x00000040; replicaId_ = value; onChanged(); return this; } /** * <code>optional int32 replica_id = 7 [default = 0];</code> */ public Builder clearReplicaId() { bitField0_ = (bitField0_ & ~0x00000040); replicaId_ = 0; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:hbase.pb.RegionInfo) } static { defaultInstance = new RegionInfo(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.RegionInfo) } public interface FavoredNodesOrBuilder extends com.google.protobuf.MessageOrBuilder { // repeated .hbase.pb.ServerName favored_node = 1; /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> getFavoredNodeList(); /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getFavoredNode(int index); /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ int getFavoredNodeCount(); /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getFavoredNodeOrBuilderList(); /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getFavoredNodeOrBuilder( int index); } /** * Protobuf type {@code hbase.pb.FavoredNodes} * * <pre> ** * Protocol buffer for favored nodes * </pre> */ public static final class FavoredNodes extends com.google.protobuf.GeneratedMessage implements FavoredNodesOrBuilder { // Use FavoredNodes.newBuilder() to construct. 
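/*
 * Illustrative usage sketch (editor's note, not emitted by protoc): populating and
 * serializing the RegionInfo message defined above. setRegionId, setTableName(Builder),
 * setStartKey/setEndKey, setSplit, build() and the static parseFrom(byte[]) all appear
 * in the generated RegionInfo code above; the TableName.Builder setters
 * (setNamespace/setQualifier) are assumed to follow the same codegen pattern for its
 * two bytes fields, and the literal values are placeholders only.
 *
 *   import com.google.protobuf.ByteString;
 *   import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo;
 *   import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName;
 *
 *   RegionInfo info = RegionInfo.newBuilder()
 *       .setRegionId(1L)                                   // required uint64 region_id = 1
 *       .setTableName(TableName.newBuilder()               // required .hbase.pb.TableName table_name = 2
 *           .setNamespace(ByteString.copyFromUtf8("default"))
 *           .setQualifier(ByteString.copyFromUtf8("t1")))
 *       .setStartKey(ByteString.copyFromUtf8("row-aaa"))   // optional bytes start_key = 3
 *       .setEndKey(ByteString.copyFromUtf8("row-zzz"))     // optional bytes end_key = 4
 *       .setSplit(false)                                   // optional bool split = 6
 *       .build();                                          // build() checks the two required fields
 *
 *   byte[] wire = info.toByteArray();                      // toByteArray() comes from the protobuf runtime base class
 *   RegionInfo parsed = RegionInfo.parseFrom(wire);        // static parseFrom(byte[]) shown above
 */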
private FavoredNodes(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private FavoredNodes(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final FavoredNodes defaultInstance; public static FavoredNodes getDefaultInstance() { return defaultInstance; } public FavoredNodes getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private FavoredNodes( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { favoredNode_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName>(); mutable_bitField0_ |= 0x00000001; } favoredNode_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry)); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { favoredNode_ = java.util.Collections.unmodifiableList(favoredNode_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_FavoredNodes_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_FavoredNodes_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes.Builder.class); } public static com.google.protobuf.Parser<FavoredNodes> PARSER = new com.google.protobuf.AbstractParser<FavoredNodes>() { public FavoredNodes parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new FavoredNodes(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<FavoredNodes> getParserForType() { return PARSER; } // repeated .hbase.pb.ServerName favored_node = 1; public static final int FAVORED_NODE_FIELD_NUMBER = 1; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> favoredNode_; /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> getFavoredNodeList() { return favoredNode_; } /** * 
<code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getFavoredNodeOrBuilderList() { return favoredNode_; } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public int getFavoredNodeCount() { return favoredNode_.size(); } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getFavoredNode(int index) { return favoredNode_.get(index); } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getFavoredNodeOrBuilder( int index) { return favoredNode_.get(index); } private void initFields() { favoredNode_ = java.util.Collections.emptyList(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; for (int i = 0; i < getFavoredNodeCount(); i++) { if (!getFavoredNode(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); for (int i = 0; i < favoredNode_.size(); i++) { output.writeMessage(1, favoredNode_.get(i)); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; for (int i = 0; i < favoredNode_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, favoredNode_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes) obj; boolean result = true; result = result && getFavoredNodeList() .equals(other.getFavoredNodeList()); result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (getFavoredNodeCount() > 0) { hash = (37 * hash) + FAVORED_NODE_FIELD_NUMBER; hash = (53 * hash) + getFavoredNodeList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.FavoredNodes} * * <pre> ** * Protocol buffer for favored nodes * </pre> */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodesOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_FavoredNodes_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_FavoredNodes_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent 
parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getFavoredNodeFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (favoredNodeBuilder_ == null) { favoredNode_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { favoredNodeBuilder_.clear(); } return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_FavoredNodes_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes(this); int from_bitField0_ = bitField0_; if (favoredNodeBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001)) { favoredNode_ = java.util.Collections.unmodifiableList(favoredNode_); bitField0_ = (bitField0_ & ~0x00000001); } result.favoredNode_ = favoredNode_; } else { result.favoredNode_ = favoredNodeBuilder_.build(); } onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes other) { if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes.getDefaultInstance()) return this; if (favoredNodeBuilder_ == null) { if (!other.favoredNode_.isEmpty()) { if (favoredNode_.isEmpty()) { favoredNode_ = other.favoredNode_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureFavoredNodeIsMutable(); favoredNode_.addAll(other.favoredNode_); } onChanged(); } } else { if (!other.favoredNode_.isEmpty()) { if (favoredNodeBuilder_.isEmpty()) { favoredNodeBuilder_.dispose(); favoredNodeBuilder_ = null; favoredNode_ = other.favoredNode_; bitField0_ = (bitField0_ & ~0x00000001); favoredNodeBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
getFavoredNodeFieldBuilder() : null; } else { favoredNodeBuilder_.addAllMessages(other.favoredNode_); } } } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { for (int i = 0; i < getFavoredNodeCount(); i++) { if (!getFavoredNode(i).isInitialized()) { return false; } } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // repeated .hbase.pb.ServerName favored_node = 1; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> favoredNode_ = java.util.Collections.emptyList(); private void ensureFavoredNodeIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { favoredNode_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName>(favoredNode_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> favoredNodeBuilder_; /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> getFavoredNodeList() { if (favoredNodeBuilder_ == null) { return java.util.Collections.unmodifiableList(favoredNode_); } else { return favoredNodeBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public int getFavoredNodeCount() { if (favoredNodeBuilder_ == null) { return favoredNode_.size(); } else { return favoredNodeBuilder_.getCount(); } } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getFavoredNode(int index) { if (favoredNodeBuilder_ == null) { return favoredNode_.get(index); } else { return favoredNodeBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public Builder setFavoredNode( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { if (favoredNodeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFavoredNodeIsMutable(); favoredNode_.set(index, value); onChanged(); } else { favoredNodeBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public Builder setFavoredNode( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { if (favoredNodeBuilder_ == null) { ensureFavoredNodeIsMutable(); favoredNode_.set(index, builderForValue.build()); onChanged(); } else { favoredNodeBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public Builder addFavoredNode(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName 
value) { if (favoredNodeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFavoredNodeIsMutable(); favoredNode_.add(value); onChanged(); } else { favoredNodeBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public Builder addFavoredNode( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { if (favoredNodeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFavoredNodeIsMutable(); favoredNode_.add(index, value); onChanged(); } else { favoredNodeBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public Builder addFavoredNode( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { if (favoredNodeBuilder_ == null) { ensureFavoredNodeIsMutable(); favoredNode_.add(builderForValue.build()); onChanged(); } else { favoredNodeBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public Builder addFavoredNode( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { if (favoredNodeBuilder_ == null) { ensureFavoredNodeIsMutable(); favoredNode_.add(index, builderForValue.build()); onChanged(); } else { favoredNodeBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public Builder addAllFavoredNode( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> values) { if (favoredNodeBuilder_ == null) { ensureFavoredNodeIsMutable(); super.addAll(values, favoredNode_); onChanged(); } else { favoredNodeBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public Builder clearFavoredNode() { if (favoredNodeBuilder_ == null) { favoredNode_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { favoredNodeBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public Builder removeFavoredNode(int index) { if (favoredNodeBuilder_ == null) { ensureFavoredNodeIsMutable(); favoredNode_.remove(index); onChanged(); } else { favoredNodeBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getFavoredNodeBuilder( int index) { return getFavoredNodeFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getFavoredNodeOrBuilder( int index) { if (favoredNodeBuilder_ == null) { return favoredNode_.get(index); } else { return favoredNodeBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getFavoredNodeOrBuilderList() { if (favoredNodeBuilder_ != null) { return favoredNodeBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(favoredNode_); } } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder addFavoredNodeBuilder() { return getFavoredNodeFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()); } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder addFavoredNodeBuilder( int index) { return getFavoredNodeFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()); } /** * <code>repeated .hbase.pb.ServerName favored_node = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder> getFavoredNodeBuilderList() { return getFavoredNodeFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getFavoredNodeFieldBuilder() { if (favoredNodeBuilder_ == null) { favoredNodeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( favoredNode_, ((bitField0_ & 0x00000001) == 0x00000001), getParentForChildren(), isClean()); favoredNode_ = null; } return favoredNodeBuilder_; } // @@protoc_insertion_point(builder_scope:hbase.pb.FavoredNodes) } static { defaultInstance = new FavoredNodes(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.FavoredNodes) } public interface RegionSpecifierOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .hbase.pb.RegionSpecifier.RegionSpecifierType type = 1; /** * <code>required .hbase.pb.RegionSpecifier.RegionSpecifierType type = 1;</code> */ boolean hasType(); /** * <code>required .hbase.pb.RegionSpecifier.RegionSpecifierType type = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType getType(); // required bytes value = 2; /** * <code>required bytes value = 2;</code> */ boolean hasValue(); /** * <code>required bytes value = 2;</code> */ com.google.protobuf.ByteString getValue(); } /** * Protobuf type {@code hbase.pb.RegionSpecifier} * * <pre> ** * Container protocol buffer to specify a region. * You can specify region by region name, or the hash * of the region name, which is known as encoded * region name. * </pre> */ public static final class RegionSpecifier extends com.google.protobuf.GeneratedMessage implements RegionSpecifierOrBuilder { // Use RegionSpecifier.newBuilder() to construct. 
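    // Editor-added illustration, not emitted by protoc: a minimal sketch of how a
    // caller might build a RegionSpecifier and round-trip it through its wire form.
    // The helper name and the encoded-region-name literal below are hypothetical.
    private static RegionSpecifier exampleRoundTrip()
        throws com.google.protobuf.InvalidProtocolBufferException {
      RegionSpecifier spec = RegionSpecifier.newBuilder()
          // Address the region by the hash of its name (the "encoded" region name).
          .setType(RegionSpecifierType.ENCODED_REGION_NAME)
          .setValue(com.google.protobuf.ByteString.copyFromUtf8("d0cf3c54f7c3e8b2"))
          .build();
      byte[] wire = spec.toByteArray();        // serialize to the protobuf wire format
      return RegionSpecifier.parseFrom(wire);  // parse the bytes back into a message
    }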
private RegionSpecifier(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private RegionSpecifier(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final RegionSpecifier defaultInstance; public static RegionSpecifier getDefaultInstance() { return defaultInstance; } public RegionSpecifier getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private RegionSpecifier( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { int rawValue = input.readEnum(); org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(1, rawValue); } else { bitField0_ |= 0x00000001; type_ = value; } break; } case 18: { bitField0_ |= 0x00000002; value_ = input.readBytes(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionSpecifier_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionSpecifier_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder.class); } public static com.google.protobuf.Parser<RegionSpecifier> PARSER = new com.google.protobuf.AbstractParser<RegionSpecifier>() { public RegionSpecifier parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new RegionSpecifier(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<RegionSpecifier> getParserForType() { return PARSER; } /** * Protobuf enum {@code hbase.pb.RegionSpecifier.RegionSpecifierType} */ public enum RegionSpecifierType implements com.google.protobuf.ProtocolMessageEnum { /** * <code>REGION_NAME = 1;</code> * * <pre> * <tablename>,<startkey>,<regionId>.<encodedName> * </pre> */ REGION_NAME(0, 1), /** * <code>ENCODED_REGION_NAME = 2;</code> * * <pre> * hash of <tablename>,<startkey>,<regionId> * </pre> */ 
ENCODED_REGION_NAME(1, 2), ; /** * <code>REGION_NAME = 1;</code> * * <pre> * <tablename>,<startkey>,<regionId>.<encodedName> * </pre> */ public static final int REGION_NAME_VALUE = 1; /** * <code>ENCODED_REGION_NAME = 2;</code> * * <pre> * hash of <tablename>,<startkey>,<regionId> * </pre> */ public static final int ENCODED_REGION_NAME_VALUE = 2; public final int getNumber() { return value; } public static RegionSpecifierType valueOf(int value) { switch (value) { case 1: return REGION_NAME; case 2: return ENCODED_REGION_NAME; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<RegionSpecifierType> internalGetValueMap() { return internalValueMap; } private static com.google.protobuf.Internal.EnumLiteMap<RegionSpecifierType> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<RegionSpecifierType>() { public RegionSpecifierType findValueByNumber(int number) { return RegionSpecifierType.valueOf(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(index); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDescriptor().getEnumTypes().get(0); } private static final RegionSpecifierType[] VALUES = values(); public static RegionSpecifierType valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } return VALUES[desc.getIndex()]; } private final int index; private final int value; private RegionSpecifierType(int index, int value) { this.index = index; this.value = value; } // @@protoc_insertion_point(enum_scope:hbase.pb.RegionSpecifier.RegionSpecifierType) } private int bitField0_; // required .hbase.pb.RegionSpecifier.RegionSpecifierType type = 1; public static final int TYPE_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType type_; /** * <code>required .hbase.pb.RegionSpecifier.RegionSpecifierType type = 1;</code> */ public boolean hasType() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.RegionSpecifier.RegionSpecifierType type = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType getType() { return type_; } // required bytes value = 2; public static final int VALUE_FIELD_NUMBER = 2; private com.google.protobuf.ByteString value_; /** * <code>required bytes value = 2;</code> */ public boolean hasValue() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required bytes value = 2;</code> */ public com.google.protobuf.ByteString getValue() { return value_; } private void initFields() { type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME; value_ = com.google.protobuf.ByteString.EMPTY; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasType()) { memoizedIsInitialized = 0; return false; } if (!hasValue()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void 
writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeEnum(1, type_.getNumber()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, value_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(1, type_.getNumber()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, value_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier) obj; boolean result = true; result = result && (hasType() == other.hasType()); if (hasType()) { result = result && (getType() == other.getType()); } result = result && (hasValue() == other.hasValue()); if (hasValue()) { result = result && getValue() .equals(other.getValue()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasType()) { hash = (37 * hash) + TYPE_FIELD_NUMBER; hash = (53 * hash) + hashEnum(getType()); } if (hasValue()) { hash = (37 * hash) + VALUE_FIELD_NUMBER; hash = (53 * hash) + getValue().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( 
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.RegionSpecifier} * * <pre> ** * Container protocol buffer to specify a region. * You can specify region by region name, or the hash * of the region name, which is known as encoded * region name. 
* </pre> */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionSpecifier_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionSpecifier_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME; bitField0_ = (bitField0_ & ~0x00000001); value_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionSpecifier_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.type_ = type_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.value_ = value_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier other) { if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) return this; if (other.hasType()) { setType(other.getType()); } if (other.hasValue()) { 
setValue(other.getValue()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasType()) { return false; } if (!hasValue()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required .hbase.pb.RegionSpecifier.RegionSpecifierType type = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME; /** * <code>required .hbase.pb.RegionSpecifier.RegionSpecifierType type = 1;</code> */ public boolean hasType() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.RegionSpecifier.RegionSpecifierType type = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType getType() { return type_; } /** * <code>required .hbase.pb.RegionSpecifier.RegionSpecifierType type = 1;</code> */ public Builder setType(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; type_ = value; onChanged(); return this; } /** * <code>required .hbase.pb.RegionSpecifier.RegionSpecifierType type = 1;</code> */ public Builder clearType() { bitField0_ = (bitField0_ & ~0x00000001); type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME; onChanged(); return this; } // required bytes value = 2; private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes value = 2;</code> */ public boolean hasValue() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required bytes value = 2;</code> */ public com.google.protobuf.ByteString getValue() { return value_; } /** * <code>required bytes value = 2;</code> */ public Builder setValue(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; value_ = value; onChanged(); return this; } /** * <code>required bytes value = 2;</code> */ public Builder clearValue() { bitField0_ = (bitField0_ & ~0x00000002); value_ = getDefaultInstance().getValue(); onChanged(); return this; } // @@protoc_insertion_point(builder_scope:hbase.pb.RegionSpecifier) } static { defaultInstance = new RegionSpecifier(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.RegionSpecifier) } public interface TimeRangeOrBuilder extends com.google.protobuf.MessageOrBuilder { // optional uint64 from = 1; /** * <code>optional uint64 from = 1;</code> */ boolean hasFrom(); /** * <code>optional uint64 from = 1;</code> */ long getFrom(); // optional uint64 to = 2; /** * <code>optional uint64 to = 2;</code> */ boolean hasTo(); /** * <code>optional uint64 to = 2;</code> */ long getTo(); } /** * 
Protobuf type {@code hbase.pb.TimeRange} * * <pre> ** * A range of time. Both from and to are Java time * stamp in milliseconds. If you don't specify a time * range, it means all time. By default, if not * specified, from = 0, and to = Long.MAX_VALUE * </pre> */ public static final class TimeRange extends com.google.protobuf.GeneratedMessage implements TimeRangeOrBuilder { // Use TimeRange.newBuilder() to construct. private TimeRange(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private TimeRange(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final TimeRange defaultInstance; public static TimeRange getDefaultInstance() { return defaultInstance; } public TimeRange getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private TimeRange( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { bitField0_ |= 0x00000001; from_ = input.readUInt64(); break; } case 16: { bitField0_ |= 0x00000002; to_ = input.readUInt64(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TimeRange_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TimeRange_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder.class); } public static com.google.protobuf.Parser<TimeRange> PARSER = new com.google.protobuf.AbstractParser<TimeRange>() { public TimeRange parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new TimeRange(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<TimeRange> getParserForType() { return PARSER; } private int bitField0_; // optional uint64 from = 1; public static final int FROM_FIELD_NUMBER = 1; private long from_; /** * <code>optional uint64 from = 1;</code> */ public boolean hasFrom() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional uint64 from = 1;</code> */ public long getFrom() { return from_; } // optional uint64 to = 2; public static final int 
TO_FIELD_NUMBER = 2; private long to_; /** * <code>optional uint64 to = 2;</code> */ public boolean hasTo() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional uint64 to = 2;</code> */ public long getTo() { return to_; } private void initFields() { from_ = 0L; to_ = 0L; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt64(1, from_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeUInt64(2, to_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(1, from_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(2, to_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange) obj; boolean result = true; result = result && (hasFrom() == other.hasFrom()); if (hasFrom()) { result = result && (getFrom() == other.getFrom()); } result = result && (hasTo() == other.hasTo()); if (hasTo()) { result = result && (getTo() == other.getTo()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasFrom()) { hash = (37 * hash) + FROM_FIELD_NUMBER; hash = (53 * hash) + hashLong(getFrom()); } if (hasTo()) { hash = (37 * hash) + TO_FIELD_NUMBER; hash = (53 * hash) + hashLong(getTo()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom( byte[] data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.TimeRange} * * <pre> ** * A range of time. Both from and to are Java time * stamp in milliseconds. If you don't specify a time * range, it means all time. 
By default, if not * specified, from = 0, and to = Long.MAX_VALUE * </pre> */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TimeRange_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TimeRange_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); from_ = 0L; bitField0_ = (bitField0_ & ~0x00000001); to_ = 0L; bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TimeRange_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.from_ = from_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.to_ = to_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange other) { if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) return this; if (other.hasFrom()) { setFrom(other.getFrom()); } if (other.hasTo()) { setTo(other.getTo()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream 
input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // optional uint64 from = 1; private long from_ ; /** * <code>optional uint64 from = 1;</code> */ public boolean hasFrom() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional uint64 from = 1;</code> */ public long getFrom() { return from_; } /** * <code>optional uint64 from = 1;</code> */ public Builder setFrom(long value) { bitField0_ |= 0x00000001; from_ = value; onChanged(); return this; } /** * <code>optional uint64 from = 1;</code> */ public Builder clearFrom() { bitField0_ = (bitField0_ & ~0x00000001); from_ = 0L; onChanged(); return this; } // optional uint64 to = 2; private long to_ ; /** * <code>optional uint64 to = 2;</code> */ public boolean hasTo() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional uint64 to = 2;</code> */ public long getTo() { return to_; } /** * <code>optional uint64 to = 2;</code> */ public Builder setTo(long value) { bitField0_ |= 0x00000002; to_ = value; onChanged(); return this; } /** * <code>optional uint64 to = 2;</code> */ public Builder clearTo() { bitField0_ = (bitField0_ & ~0x00000002); to_ = 0L; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:hbase.pb.TimeRange) } static { defaultInstance = new TimeRange(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.TimeRange) } public interface ColumnFamilyTimeRangeOrBuilder extends com.google.protobuf.MessageOrBuilder { // required bytes column_family = 1; /** * <code>required bytes column_family = 1;</code> */ boolean hasColumnFamily(); /** * <code>required bytes column_family = 1;</code> */ com.google.protobuf.ByteString getColumnFamily(); // required .hbase.pb.TimeRange time_range = 2; /** * <code>required .hbase.pb.TimeRange time_range = 2;</code> */ boolean hasTimeRange(); /** * <code>required .hbase.pb.TimeRange time_range = 2;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange(); /** * <code>required .hbase.pb.TimeRange time_range = 2;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder(); } /** * Protobuf type {@code hbase.pb.ColumnFamilyTimeRange} * * <pre> * ColumnFamily Specific TimeRange * </pre> */ public static final class ColumnFamilyTimeRange extends com.google.protobuf.GeneratedMessage implements ColumnFamilyTimeRangeOrBuilder { // Use ColumnFamilyTimeRange.newBuilder() to construct. 
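    // Editor-added illustration, not emitted by protoc: a minimal sketch of pairing a
    // TimeRange window with a single column family, assuming the standard generated
    // setters (setColumnFamily, setTimeRange). The helper name, family name, and
    // epoch-millisecond bounds below are hypothetical.
    private static ColumnFamilyTimeRange exampleColumnFamilyTimeRange() {
      TimeRange window = TimeRange.newBuilder()
          .setFrom(1500000000000L)   // lower bound of the window, epoch millis
          .setTo(1500003600000L)     // upper bound of the window, epoch millis
          .build();
      return ColumnFamilyTimeRange.newBuilder()
          .setColumnFamily(com.google.protobuf.ByteString.copyFromUtf8("cf"))
          .setTimeRange(window)
          .build();
    }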
private ColumnFamilyTimeRange(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private ColumnFamilyTimeRange(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final ColumnFamilyTimeRange defaultInstance; public static ColumnFamilyTimeRange getDefaultInstance() { return defaultInstance; } public ColumnFamilyTimeRange getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ColumnFamilyTimeRange( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; columnFamily_ = input.readBytes(); break; } case 18: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = null; if (((bitField0_ & 0x00000002) == 0x00000002)) { subBuilder = timeRange_.toBuilder(); } timeRange_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(timeRange_); timeRange_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ColumnFamilyTimeRange_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ColumnFamilyTimeRange_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder.class); } public static com.google.protobuf.Parser<ColumnFamilyTimeRange> PARSER = new com.google.protobuf.AbstractParser<ColumnFamilyTimeRange>() { public ColumnFamilyTimeRange parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ColumnFamilyTimeRange(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<ColumnFamilyTimeRange> getParserForType() { return PARSER; } private int bitField0_; // required bytes column_family = 1; public static final int COLUMN_FAMILY_FIELD_NUMBER = 1; private com.google.protobuf.ByteString columnFamily_; /** * <code>required bytes column_family = 1;</code> */ public boolean 
hasColumnFamily() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes column_family = 1;</code> */ public com.google.protobuf.ByteString getColumnFamily() { return columnFamily_; } // required .hbase.pb.TimeRange time_range = 2; public static final int TIME_RANGE_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_; /** * <code>required .hbase.pb.TimeRange time_range = 2;</code> */ public boolean hasTimeRange() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required .hbase.pb.TimeRange time_range = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { return timeRange_; } /** * <code>required .hbase.pb.TimeRange time_range = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { return timeRange_; } private void initFields() { columnFamily_ = com.google.protobuf.ByteString.EMPTY; timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasColumnFamily()) { memoizedIsInitialized = 0; return false; } if (!hasTimeRange()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, columnFamily_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeMessage(2, timeRange_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, columnFamily_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, timeRange_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange) obj; boolean result = true; result = result && (hasColumnFamily() == other.hasColumnFamily()); if (hasColumnFamily()) { result = result && getColumnFamily() .equals(other.getColumnFamily()); } result = result && (hasTimeRange() == other.hasTimeRange()); if (hasTimeRange()) { result = result && getTimeRange() .equals(other.getTimeRange()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasColumnFamily()) { hash = (37 * 
hash) + COLUMN_FAMILY_FIELD_NUMBER; hash = (53 * hash) + getColumnFamily().hashCode(); } if (hasTimeRange()) { hash = (37 * hash) + TIME_RANGE_FIELD_NUMBER; hash = (53 * hash) + getTimeRange().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.ColumnFamilyTimeRange} * * <pre> * ColumnFamily Specific TimeRange * </pre> */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ColumnFamilyTimeRange_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ColumnFamilyTimeRange_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getTimeRangeFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); columnFamily_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); if (timeRangeBuilder_ == null) { timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); } else { timeRangeBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ColumnFamilyTimeRange_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.columnFamily_ = columnFamily_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } if (timeRangeBuilder_ == null) { result.timeRange_ = timeRange_; } else { result.timeRange_ = timeRangeBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange other) { if (other == 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.getDefaultInstance()) return this; if (other.hasColumnFamily()) { setColumnFamily(other.getColumnFamily()); } if (other.hasTimeRange()) { mergeTimeRange(other.getTimeRange()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasColumnFamily()) { return false; } if (!hasTimeRange()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required bytes column_family = 1; private com.google.protobuf.ByteString columnFamily_ = com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes column_family = 1;</code> */ public boolean hasColumnFamily() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes column_family = 1;</code> */ public com.google.protobuf.ByteString getColumnFamily() { return columnFamily_; } /** * <code>required bytes column_family = 1;</code> */ public Builder setColumnFamily(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; columnFamily_ = value; onChanged(); return this; } /** * <code>required bytes column_family = 1;</code> */ public Builder clearColumnFamily() { bitField0_ = (bitField0_ & ~0x00000001); columnFamily_ = getDefaultInstance().getColumnFamily(); onChanged(); return this; } // required .hbase.pb.TimeRange time_range = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_; /** * <code>required .hbase.pb.TimeRange time_range = 2;</code> */ public boolean hasTimeRange() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required .hbase.pb.TimeRange time_range = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { if (timeRangeBuilder_ == null) { return timeRange_; } else { return timeRangeBuilder_.getMessage(); } } /** * <code>required .hbase.pb.TimeRange time_range = 2;</code> */ public Builder setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { if (timeRangeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } timeRange_ = value; onChanged(); } else { timeRangeBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } /** * <code>required .hbase.pb.TimeRange time_range = 2;</code> */ public Builder setTimeRange( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) { if (timeRangeBuilder_ == null) { timeRange_ = builderForValue.build(); onChanged(); } 
else { timeRangeBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } /** * <code>required .hbase.pb.TimeRange time_range = 2;</code> */ public Builder mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { if (timeRangeBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && timeRange_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) { timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial(); } else { timeRange_ = value; } onChanged(); } else { timeRangeBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } /** * <code>required .hbase.pb.TimeRange time_range = 2;</code> */ public Builder clearTimeRange() { if (timeRangeBuilder_ == null) { timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); onChanged(); } else { timeRangeBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } /** * <code>required .hbase.pb.TimeRange time_range = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() { bitField0_ |= 0x00000002; onChanged(); return getTimeRangeFieldBuilder().getBuilder(); } /** * <code>required .hbase.pb.TimeRange time_range = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { if (timeRangeBuilder_ != null) { return timeRangeBuilder_.getMessageOrBuilder(); } else { return timeRange_; } } /** * <code>required .hbase.pb.TimeRange time_range = 2;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> getTimeRangeFieldBuilder() { if (timeRangeBuilder_ == null) { timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>( timeRange_, getParentForChildren(), isClean()); timeRange_ = null; } return timeRangeBuilder_; } // @@protoc_insertion_point(builder_scope:hbase.pb.ColumnFamilyTimeRange) } static { defaultInstance = new ColumnFamilyTimeRange(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.ColumnFamilyTimeRange) } public interface ServerNameOrBuilder extends com.google.protobuf.MessageOrBuilder { // required string host_name = 1; /** * <code>required string host_name = 1;</code> */ boolean hasHostName(); /** * <code>required string host_name = 1;</code> */ java.lang.String getHostName(); /** * <code>required string host_name = 1;</code> */ com.google.protobuf.ByteString getHostNameBytes(); // optional uint32 port = 2; /** * <code>optional uint32 port = 2;</code> */ boolean hasPort(); /** * <code>optional uint32 port = 2;</code> */ int getPort(); // optional uint64 start_code = 3; /** * <code>optional uint64 start_code = 3;</code> */ boolean hasStartCode(); /** * <code>optional uint64 start_code = 3;</code> */ long getStartCode(); } /** * Protobuf type {@code hbase.pb.ServerName} * * <pre> ** * Protocol buffer version of ServerName * </pre> */ public static final class ServerName extends com.google.protobuf.GeneratedMessage 
implements ServerNameOrBuilder { // Use ServerName.newBuilder() to construct. private ServerName(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private ServerName(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final ServerName defaultInstance; public static ServerName getDefaultInstance() { return defaultInstance; } public ServerName getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ServerName( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; hostName_ = input.readBytes(); break; } case 16: { bitField0_ |= 0x00000002; port_ = input.readUInt32(); break; } case 24: { bitField0_ |= 0x00000004; startCode_ = input.readUInt64(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ServerName_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ServerName_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder.class); } public static com.google.protobuf.Parser<ServerName> PARSER = new com.google.protobuf.AbstractParser<ServerName>() { public ServerName parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ServerName(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<ServerName> getParserForType() { return PARSER; } private int bitField0_; // required string host_name = 1; public static final int HOST_NAME_FIELD_NUMBER = 1; private java.lang.Object hostName_; /** * <code>required string host_name = 1;</code> */ public boolean hasHostName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string host_name = 1;</code> */ public java.lang.String getHostName() { java.lang.Object ref = hostName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { hostName_ 
= s; } return s; } } /** * <code>required string host_name = 1;</code> */ public com.google.protobuf.ByteString getHostNameBytes() { java.lang.Object ref = hostName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); hostName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } // optional uint32 port = 2; public static final int PORT_FIELD_NUMBER = 2; private int port_; /** * <code>optional uint32 port = 2;</code> */ public boolean hasPort() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional uint32 port = 2;</code> */ public int getPort() { return port_; } // optional uint64 start_code = 3; public static final int START_CODE_FIELD_NUMBER = 3; private long startCode_; /** * <code>optional uint64 start_code = 3;</code> */ public boolean hasStartCode() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional uint64 start_code = 3;</code> */ public long getStartCode() { return startCode_; } private void initFields() { hostName_ = ""; port_ = 0; startCode_ = 0L; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasHostName()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, getHostNameBytes()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeUInt32(2, port_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeUInt64(3, startCode_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, getHostNameBytes()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeUInt32Size(2, port_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(3, startCode_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName) obj; boolean result = true; result = result && (hasHostName() == other.hasHostName()); if (hasHostName()) { result = result && getHostName() .equals(other.getHostName()); } result = result && (hasPort() == other.hasPort()); if (hasPort()) { result = result && (getPort() == other.getPort()); } result = result && (hasStartCode() == other.hasStartCode()); if (hasStartCode()) { result = result && (getStartCode() == other.getStartCode()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private 
int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasHostName()) { hash = (37 * hash) + HOST_NAME_FIELD_NUMBER; hash = (53 * hash) + getHostName().hashCode(); } if (hasPort()) { hash = (37 * hash) + PORT_FIELD_NUMBER; hash = (53 * hash) + getPort(); } if (hasStartCode()) { hash = (37 * hash) + START_CODE_FIELD_NUMBER; hash = (53 * hash) + hashLong(getStartCode()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code 
hbase.pb.ServerName} * * <pre> ** * Protocol buffer version of ServerName * </pre> */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ServerName_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ServerName_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); hostName_ = ""; bitField0_ = (bitField0_ & ~0x00000001); port_ = 0; bitField0_ = (bitField0_ & ~0x00000002); startCode_ = 0L; bitField0_ = (bitField0_ & ~0x00000004); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ServerName_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.hostName_ = hostName_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.port_ = port_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.startCode_ = startCode_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName other) { if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) return this; if (other.hasHostName()) { bitField0_ |= 0x00000001; 
hostName_ = other.hostName_; onChanged(); } if (other.hasPort()) { setPort(other.getPort()); } if (other.hasStartCode()) { setStartCode(other.getStartCode()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasHostName()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required string host_name = 1; private java.lang.Object hostName_ = ""; /** * <code>required string host_name = 1;</code> */ public boolean hasHostName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string host_name = 1;</code> */ public java.lang.String getHostName() { java.lang.Object ref = hostName_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); hostName_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>required string host_name = 1;</code> */ public com.google.protobuf.ByteString getHostNameBytes() { java.lang.Object ref = hostName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); hostName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>required string host_name = 1;</code> */ public Builder setHostName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; hostName_ = value; onChanged(); return this; } /** * <code>required string host_name = 1;</code> */ public Builder clearHostName() { bitField0_ = (bitField0_ & ~0x00000001); hostName_ = getDefaultInstance().getHostName(); onChanged(); return this; } /** * <code>required string host_name = 1;</code> */ public Builder setHostNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; hostName_ = value; onChanged(); return this; } // optional uint32 port = 2; private int port_ ; /** * <code>optional uint32 port = 2;</code> */ public boolean hasPort() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional uint32 port = 2;</code> */ public int getPort() { return port_; } /** * <code>optional uint32 port = 2;</code> */ public Builder setPort(int value) { bitField0_ |= 0x00000002; port_ = value; onChanged(); return this; } /** * <code>optional uint32 port = 2;</code> */ public Builder clearPort() { bitField0_ = (bitField0_ & ~0x00000002); port_ = 0; onChanged(); return this; } // optional uint64 start_code = 3; private long startCode_ ; /** * <code>optional uint64 start_code = 3;</code> */ public boolean hasStartCode() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional uint64 start_code = 3;</code> */ public long getStartCode() { return startCode_; } /** * <code>optional uint64 start_code = 3;</code> */ public Builder setStartCode(long value) { bitField0_ |= 0x00000004; startCode_ = value; onChanged(); 
return this; } /** * <code>optional uint64 start_code = 3;</code> */ public Builder clearStartCode() { bitField0_ = (bitField0_ & ~0x00000004); startCode_ = 0L; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:hbase.pb.ServerName) } static { defaultInstance = new ServerName(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.ServerName) } public interface CoprocessorOrBuilder extends com.google.protobuf.MessageOrBuilder { // required string name = 1; /** * <code>required string name = 1;</code> */ boolean hasName(); /** * <code>required string name = 1;</code> */ java.lang.String getName(); /** * <code>required string name = 1;</code> */ com.google.protobuf.ByteString getNameBytes(); } /** * Protobuf type {@code hbase.pb.Coprocessor} */ public static final class Coprocessor extends com.google.protobuf.GeneratedMessage implements CoprocessorOrBuilder { // Use Coprocessor.newBuilder() to construct. private Coprocessor(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private Coprocessor(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final Coprocessor defaultInstance; public static Coprocessor getDefaultInstance() { return defaultInstance; } public Coprocessor getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private Coprocessor( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; name_ = input.readBytes(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_Coprocessor_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_Coprocessor_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder.class); } public static com.google.protobuf.Parser<Coprocessor> PARSER = new com.google.protobuf.AbstractParser<Coprocessor>() { public Coprocessor parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new Coprocessor(input, 
extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<Coprocessor> getParserForType() { return PARSER; } private int bitField0_; // required string name = 1; public static final int NAME_FIELD_NUMBER = 1; private java.lang.Object name_; /** * <code>required string name = 1;</code> */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { name_ = s; } return s; } } /** * <code>required string name = 1;</code> */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private void initFields() { name_ = ""; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasName()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, getNameBytes()); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, getNameBytes()); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor) obj; boolean result = true; result = result && (hasName() == other.hasName()); if (hasName()) { result = result && getName() .equals(other.getName()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasName()) { hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseFrom( 
com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.Coprocessor} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_Coprocessor_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_Coprocessor_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.newBuilder() private 
Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); name_ = ""; bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_Coprocessor_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.name_ = name_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor other) { if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.getDefaultInstance()) return this; if (other.hasName()) { bitField0_ |= 0x00000001; name_ = other.name_; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasName()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required string name = 1; private java.lang.Object name_ = ""; /** * <code>required string name = 1;</code> */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>required 
string name = 1;</code> */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>required string name = 1;</code> */ public Builder setName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; name_ = value; onChanged(); return this; } /** * <code>required string name = 1;</code> */ public Builder clearName() { bitField0_ = (bitField0_ & ~0x00000001); name_ = getDefaultInstance().getName(); onChanged(); return this; } /** * <code>required string name = 1;</code> */ public Builder setNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; name_ = value; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:hbase.pb.Coprocessor) } static { defaultInstance = new Coprocessor(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.Coprocessor) } public interface NameStringPairOrBuilder extends com.google.protobuf.MessageOrBuilder { // required string name = 1; /** * <code>required string name = 1;</code> */ boolean hasName(); /** * <code>required string name = 1;</code> */ java.lang.String getName(); /** * <code>required string name = 1;</code> */ com.google.protobuf.ByteString getNameBytes(); // required string value = 2; /** * <code>required string value = 2;</code> */ boolean hasValue(); /** * <code>required string value = 2;</code> */ java.lang.String getValue(); /** * <code>required string value = 2;</code> */ com.google.protobuf.ByteString getValueBytes(); } /** * Protobuf type {@code hbase.pb.NameStringPair} */ public static final class NameStringPair extends com.google.protobuf.GeneratedMessage implements NameStringPairOrBuilder { // Use NameStringPair.newBuilder() to construct. 
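  /*
   * Editor's note (not produced by protoc): a minimal usage sketch for the hbase.pb.NameStringPair
   * message implemented below. It relies only on the generated API visible in this file
   * (newBuilder(), setName(), setValue(), build(), parseFrom()) plus the standard
   * toByteString() inherited from the protobuf runtime; the key/value strings are illustrative.
   *
   *   NameStringPair pair = NameStringPair.newBuilder()
   *       .setName("some.config.key")        // required string name = 1
   *       .setValue("some value")            // required string value = 2
   *       .build();                          // build() throws if a required field is unset
   *   com.google.protobuf.ByteString wire = pair.toByteString();
   *   NameStringPair roundTrip = NameStringPair.parseFrom(wire);
   *   assert roundTrip.getName().equals(pair.getName());
   */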
private NameStringPair(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private NameStringPair(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final NameStringPair defaultInstance; public static NameStringPair getDefaultInstance() { return defaultInstance; } public NameStringPair getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private NameStringPair( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; name_ = input.readBytes(); break; } case 18: { bitField0_ |= 0x00000002; value_ = input.readBytes(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameStringPair_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameStringPair_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder.class); } public static com.google.protobuf.Parser<NameStringPair> PARSER = new com.google.protobuf.AbstractParser<NameStringPair>() { public NameStringPair parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new NameStringPair(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<NameStringPair> getParserForType() { return PARSER; } private int bitField0_; // required string name = 1; public static final int NAME_FIELD_NUMBER = 1; private java.lang.Object name_; /** * <code>required string name = 1;</code> */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { name_ = s; } return s; } } /** * <code>required string name = 1;</code> */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object 
ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } // required string value = 2; public static final int VALUE_FIELD_NUMBER = 2; private java.lang.Object value_; /** * <code>required string value = 2;</code> */ public boolean hasValue() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required string value = 2;</code> */ public java.lang.String getValue() { java.lang.Object ref = value_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { value_ = s; } return s; } } /** * <code>required string value = 2;</code> */ public com.google.protobuf.ByteString getValueBytes() { java.lang.Object ref = value_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); value_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private void initFields() { name_ = ""; value_ = ""; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasName()) { memoizedIsInitialized = 0; return false; } if (!hasValue()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, getNameBytes()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, getValueBytes()); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, getNameBytes()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, getValueBytes()); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair) obj; boolean result = true; result = result && (hasName() == other.hasName()); if (hasName()) { result = result && getName() .equals(other.getName()); } result = result && (hasValue() == other.hasValue()); if (hasValue()) { result = result && getValue() .equals(other.getValue()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + 
getDescriptorForType().hashCode(); if (hasName()) { hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); } if (hasValue()) { hash = (37 * hash) + VALUE_FIELD_NUMBER; hash = (53 * hash) + getValue().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.NameStringPair} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder { public static final 
com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameStringPair_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameStringPair_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); name_ = ""; bitField0_ = (bitField0_ & ~0x00000001); value_ = ""; bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameStringPair_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.name_ = name_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.value_ = value_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair other) { if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()) return this; if (other.hasName()) { bitField0_ |= 0x00000001; name_ = other.name_; onChanged(); } if (other.hasValue()) { bitField0_ |= 0x00000002; value_ = other.value_; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasName()) { return false; } if (!hasValue()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required string name = 1; private java.lang.Object name_ = ""; /** * <code>required string name = 1;</code> */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>required string name = 1;</code> */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>required string name = 1;</code> */ public Builder setName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; name_ = value; onChanged(); return this; } /** * <code>required string name = 1;</code> */ public Builder clearName() { bitField0_ = (bitField0_ & ~0x00000001); name_ = getDefaultInstance().getName(); onChanged(); return this; } /** * <code>required string name = 1;</code> */ public Builder setNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; name_ = value; onChanged(); return this; } // required string value = 2; private java.lang.Object value_ = ""; /** * <code>required string value = 2;</code> */ public boolean hasValue() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required string value = 2;</code> */ public java.lang.String getValue() { java.lang.Object ref = value_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); value_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>required string value = 2;</code> */ public com.google.protobuf.ByteString getValueBytes() { java.lang.Object ref = value_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); value_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>required string value = 2;</code> */ public Builder setValue( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; value_ = value; onChanged(); return this; } /** * <code>required string value = 2;</code> */ public Builder clearValue() { bitField0_ = (bitField0_ & ~0x00000002); value_ = getDefaultInstance().getValue(); onChanged(); return this; } /** * <code>required string value = 2;</code> */ public Builder setValueBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; value_ = value; onChanged(); return this; } 
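// Illustrative usage sketch (not part of the generated code): NameStringPair carries a
// required name/value string pair, so a typical round trip with this builder looks like
// the following. The property name and value below are hypothetical placeholders.
//
//   NameStringPair pair = NameStringPair.newBuilder()
//       .setName("some.property.name")
//       .setValue("some-value")
//       .build();                       // build() fails if name or value is unset
//   byte[] wire = pair.toByteArray();   // standard protobuf serialization
//   NameStringPair copy = NameStringPair.parseFrom(wire);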
// @@protoc_insertion_point(builder_scope:hbase.pb.NameStringPair) } static { defaultInstance = new NameStringPair(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.NameStringPair) } public interface NameBytesPairOrBuilder extends com.google.protobuf.MessageOrBuilder { // required string name = 1; /** * <code>required string name = 1;</code> */ boolean hasName(); /** * <code>required string name = 1;</code> */ java.lang.String getName(); /** * <code>required string name = 1;</code> */ com.google.protobuf.ByteString getNameBytes(); // optional bytes value = 2; /** * <code>optional bytes value = 2;</code> */ boolean hasValue(); /** * <code>optional bytes value = 2;</code> */ com.google.protobuf.ByteString getValue(); } /** * Protobuf type {@code hbase.pb.NameBytesPair} */ public static final class NameBytesPair extends com.google.protobuf.GeneratedMessage implements NameBytesPairOrBuilder { // Use NameBytesPair.newBuilder() to construct. private NameBytesPair(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private NameBytesPair(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final NameBytesPair defaultInstance; public static NameBytesPair getDefaultInstance() { return defaultInstance; } public NameBytesPair getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private NameBytesPair( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; name_ = input.readBytes(); break; } case 18: { bitField0_ |= 0x00000002; value_ = input.readBytes(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameBytesPair_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameBytesPair_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder.class); } public static com.google.protobuf.Parser<NameBytesPair> PARSER = new com.google.protobuf.AbstractParser<NameBytesPair>() { public NameBytesPair parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new NameBytesPair(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<NameBytesPair> getParserForType() { return PARSER; } private int bitField0_; // required string name = 1; public static final int NAME_FIELD_NUMBER = 1; private java.lang.Object name_; /** * <code>required string name = 1;</code> */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { name_ = s; } return s; } } /** * <code>required string name = 1;</code> */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } // optional bytes value = 2; public static final int VALUE_FIELD_NUMBER = 2; private com.google.protobuf.ByteString value_; /** * <code>optional bytes value = 2;</code> */ public boolean hasValue() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional bytes value = 2;</code> */ public com.google.protobuf.ByteString getValue() { return value_; } private void initFields() { name_ = ""; value_ = com.google.protobuf.ByteString.EMPTY; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasName()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, getNameBytes()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, value_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, getNameBytes()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, value_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair) obj; boolean result = true; result = result && (hasName() == other.hasName()); if (hasName()) { result = result && getName() .equals(other.getName()); } result = result && (hasValue() == other.hasValue()); if 
(hasValue()) { result = result && getValue() .equals(other.getValue()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasName()) { hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); } if (hasValue()) { hash = (37 * hash) + VALUE_FIELD_NUMBER; hash = (53 * hash) + getValue().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder 
builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.NameBytesPair} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameBytesPair_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameBytesPair_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); name_ = ""; bitField0_ = (bitField0_ & ~0x00000001); value_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameBytesPair_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.name_ = name_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.value_ = value_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair other) { if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) return this; if (other.hasName()) { bitField0_ |= 0x00000001; name_ = other.name_; onChanged(); } if (other.hasValue()) { setValue(other.getValue()); } 
this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasName()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required string name = 1; private java.lang.Object name_ = ""; /** * <code>required string name = 1;</code> */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>required string name = 1;</code> */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>required string name = 1;</code> */ public Builder setName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; name_ = value; onChanged(); return this; } /** * <code>required string name = 1;</code> */ public Builder clearName() { bitField0_ = (bitField0_ & ~0x00000001); name_ = getDefaultInstance().getName(); onChanged(); return this; } /** * <code>required string name = 1;</code> */ public Builder setNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; name_ = value; onChanged(); return this; } // optional bytes value = 2; private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; /** * <code>optional bytes value = 2;</code> */ public boolean hasValue() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional bytes value = 2;</code> */ public com.google.protobuf.ByteString getValue() { return value_; } /** * <code>optional bytes value = 2;</code> */ public Builder setValue(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; value_ = value; onChanged(); return this; } /** * <code>optional bytes value = 2;</code> */ public Builder clearValue() { bitField0_ = (bitField0_ & ~0x00000002); value_ = getDefaultInstance().getValue(); onChanged(); return this; } // @@protoc_insertion_point(builder_scope:hbase.pb.NameBytesPair) } static { defaultInstance = new NameBytesPair(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.NameBytesPair) } public interface BytesBytesPairOrBuilder extends com.google.protobuf.MessageOrBuilder { // required bytes first = 1; /** * <code>required bytes first = 1;</code> */ boolean hasFirst(); /** * <code>required bytes first = 1;</code> */ com.google.protobuf.ByteString getFirst(); // 
required bytes second = 2; /** * <code>required bytes second = 2;</code> */ boolean hasSecond(); /** * <code>required bytes second = 2;</code> */ com.google.protobuf.ByteString getSecond(); } /** * Protobuf type {@code hbase.pb.BytesBytesPair} */ public static final class BytesBytesPair extends com.google.protobuf.GeneratedMessage implements BytesBytesPairOrBuilder { // Use BytesBytesPair.newBuilder() to construct. private BytesBytesPair(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private BytesBytesPair(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final BytesBytesPair defaultInstance; public static BytesBytesPair getDefaultInstance() { return defaultInstance; } public BytesBytesPair getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private BytesBytesPair( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; first_ = input.readBytes(); break; } case 18: { bitField0_ |= 0x00000002; second_ = input.readBytes(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_BytesBytesPair_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_BytesBytesPair_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder.class); } public static com.google.protobuf.Parser<BytesBytesPair> PARSER = new com.google.protobuf.AbstractParser<BytesBytesPair>() { public BytesBytesPair parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new BytesBytesPair(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<BytesBytesPair> getParserForType() { return PARSER; } private int bitField0_; // required bytes first = 1; public static final int FIRST_FIELD_NUMBER = 1; private com.google.protobuf.ByteString first_; /** * <code>required bytes first = 1;</code> */ public boolean hasFirst() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes first = 
1;</code> */ public com.google.protobuf.ByteString getFirst() { return first_; } // required bytes second = 2; public static final int SECOND_FIELD_NUMBER = 2; private com.google.protobuf.ByteString second_; /** * <code>required bytes second = 2;</code> */ public boolean hasSecond() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required bytes second = 2;</code> */ public com.google.protobuf.ByteString getSecond() { return second_; } private void initFields() { first_ = com.google.protobuf.ByteString.EMPTY; second_ = com.google.protobuf.ByteString.EMPTY; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasFirst()) { memoizedIsInitialized = 0; return false; } if (!hasSecond()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, first_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, second_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, first_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, second_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair) obj; boolean result = true; result = result && (hasFirst() == other.hasFirst()); if (hasFirst()) { result = result && getFirst() .equals(other.getFirst()); } result = result && (hasSecond() == other.hasSecond()); if (hasSecond()) { result = result && getSecond() .equals(other.getSecond()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasFirst()) { hash = (37 * hash) + FIRST_FIELD_NUMBER; hash = (53 * hash) + getFirst().hashCode(); } if (hasSecond()) { hash = (37 * hash) + SECOND_FIELD_NUMBER; hash = (53 * hash) + getSecond().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( com.google.protobuf.ByteString data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.BytesBytesPair} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_BytesBytesPair_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_BytesBytesPair_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.newBuilder() 
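// Illustrative sketch (not generated): both fields of BytesBytesPair are required bytes,
// so the builder is typically used with ByteString values, e.g. (placeholder contents):
//
//   BytesBytesPair range = BytesBytesPair.newBuilder()
//       .setFirst(com.google.protobuf.ByteString.copyFromUtf8("startKey"))
//       .setSecond(com.google.protobuf.ByteString.copyFromUtf8("endKey"))
//       .build();                       // build() fails unless both first and second are set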
private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); first_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); second_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_BytesBytesPair_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.first_ = first_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.second_ = second_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair other) { if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance()) return this; if (other.hasFirst()) { setFirst(other.getFirst()); } if (other.hasSecond()) { setSecond(other.getSecond()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasFirst()) { return false; } if (!hasSecond()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required bytes first = 1; private com.google.protobuf.ByteString first_ = com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes first = 1;</code> */ 
public boolean hasFirst() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes first = 1;</code> */ public com.google.protobuf.ByteString getFirst() { return first_; } /** * <code>required bytes first = 1;</code> */ public Builder setFirst(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; first_ = value; onChanged(); return this; } /** * <code>required bytes first = 1;</code> */ public Builder clearFirst() { bitField0_ = (bitField0_ & ~0x00000001); first_ = getDefaultInstance().getFirst(); onChanged(); return this; } // required bytes second = 2; private com.google.protobuf.ByteString second_ = com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes second = 2;</code> */ public boolean hasSecond() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required bytes second = 2;</code> */ public com.google.protobuf.ByteString getSecond() { return second_; } /** * <code>required bytes second = 2;</code> */ public Builder setSecond(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; second_ = value; onChanged(); return this; } /** * <code>required bytes second = 2;</code> */ public Builder clearSecond() { bitField0_ = (bitField0_ & ~0x00000002); second_ = getDefaultInstance().getSecond(); onChanged(); return this; } // @@protoc_insertion_point(builder_scope:hbase.pb.BytesBytesPair) } static { defaultInstance = new BytesBytesPair(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.BytesBytesPair) } public interface NameInt64PairOrBuilder extends com.google.protobuf.MessageOrBuilder { // optional string name = 1; /** * <code>optional string name = 1;</code> */ boolean hasName(); /** * <code>optional string name = 1;</code> */ java.lang.String getName(); /** * <code>optional string name = 1;</code> */ com.google.protobuf.ByteString getNameBytes(); // optional int64 value = 2; /** * <code>optional int64 value = 2;</code> */ boolean hasValue(); /** * <code>optional int64 value = 2;</code> */ long getValue(); } /** * Protobuf type {@code hbase.pb.NameInt64Pair} */ public static final class NameInt64Pair extends com.google.protobuf.GeneratedMessage implements NameInt64PairOrBuilder { // Use NameInt64Pair.newBuilder() to construct. 
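// Illustrative sketch (not generated): both fields of NameInt64Pair are optional, so
// build() succeeds even when they are unset; hasName()/hasValue() report field presence.
// The counter name below is a hypothetical placeholder.
//
//   NameInt64Pair counter = NameInt64Pair.newBuilder()
//       .setName("some.counter")
//       .setValue(42L)
//       .build();
//   boolean present = counter.hasValue();   // true only when value was explicitly set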
private NameInt64Pair(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private NameInt64Pair(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final NameInt64Pair defaultInstance; public static NameInt64Pair getDefaultInstance() { return defaultInstance; } public NameInt64Pair getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private NameInt64Pair( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; name_ = input.readBytes(); break; } case 16: { bitField0_ |= 0x00000002; value_ = input.readInt64(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameInt64Pair_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameInt64Pair_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder.class); } public static com.google.protobuf.Parser<NameInt64Pair> PARSER = new com.google.protobuf.AbstractParser<NameInt64Pair>() { public NameInt64Pair parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new NameInt64Pair(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<NameInt64Pair> getParserForType() { return PARSER; } private int bitField0_; // optional string name = 1; public static final int NAME_FIELD_NUMBER = 1; private java.lang.Object name_; /** * <code>optional string name = 1;</code> */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { name_ = s; } return s; } } /** * <code>optional string name = 1;</code> */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; 
if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } // optional int64 value = 2; public static final int VALUE_FIELD_NUMBER = 2; private long value_; /** * <code>optional int64 value = 2;</code> */ public boolean hasValue() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional int64 value = 2;</code> */ public long getValue() { return value_; } private void initFields() { name_ = ""; value_ = 0L; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, getNameBytes()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeInt64(2, value_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, getNameBytes()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeInt64Size(2, value_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair) obj; boolean result = true; result = result && (hasName() == other.hasName()); if (hasName()) { result = result && getName() .equals(other.getName()); } result = result && (hasValue() == other.hasValue()); if (hasValue()) { result = result && (getValue() == other.getValue()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasName()) { hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); } if (hasValue()) { hash = (37 * hash) + VALUE_FIELD_NUMBER; hash = (53 * hash) + hashLong(getValue()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.NameInt64Pair} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameInt64Pair_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameInt64Pair_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); 
maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); name_ = ""; bitField0_ = (bitField0_ & ~0x00000001); value_ = 0L; bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NameInt64Pair_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.name_ = name_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.value_ = value_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair other) { if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.getDefaultInstance()) return this; if (other.hasName()) { bitField0_ |= 0x00000001; name_ = other.name_; onChanged(); } if (other.hasValue()) { setValue(other.getValue()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // optional string name = 1; private java.lang.Object name_ = ""; /** * <code>optional string name = 1;</code> */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); name_ = s; return s; 
} else { return (java.lang.String) ref; } } /** * <code>optional string name = 1;</code> */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>optional string name = 1;</code> */ public Builder setName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; name_ = value; onChanged(); return this; } /** * <code>optional string name = 1;</code> */ public Builder clearName() { bitField0_ = (bitField0_ & ~0x00000001); name_ = getDefaultInstance().getName(); onChanged(); return this; } /** * <code>optional string name = 1;</code> */ public Builder setNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; name_ = value; onChanged(); return this; } // optional int64 value = 2; private long value_ ; /** * <code>optional int64 value = 2;</code> */ public boolean hasValue() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional int64 value = 2;</code> */ public long getValue() { return value_; } /** * <code>optional int64 value = 2;</code> */ public Builder setValue(long value) { bitField0_ |= 0x00000002; value_ = value; onChanged(); return this; } /** * <code>optional int64 value = 2;</code> */ public Builder clearValue() { bitField0_ = (bitField0_ & ~0x00000002); value_ = 0L; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:hbase.pb.NameInt64Pair) } static { defaultInstance = new NameInt64Pair(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.NameInt64Pair) } public interface SnapshotDescriptionOrBuilder extends com.google.protobuf.MessageOrBuilder { // required string name = 1; /** * <code>required string name = 1;</code> */ boolean hasName(); /** * <code>required string name = 1;</code> */ java.lang.String getName(); /** * <code>required string name = 1;</code> */ com.google.protobuf.ByteString getNameBytes(); // optional string table = 2; /** * <code>optional string table = 2;</code> * * <pre> * not needed for delete, but checked for in taking snapshot * </pre> */ boolean hasTable(); /** * <code>optional string table = 2;</code> * * <pre> * not needed for delete, but checked for in taking snapshot * </pre> */ java.lang.String getTable(); /** * <code>optional string table = 2;</code> * * <pre> * not needed for delete, but checked for in taking snapshot * </pre> */ com.google.protobuf.ByteString getTableBytes(); // optional int64 creation_time = 3 [default = 0]; /** * <code>optional int64 creation_time = 3 [default = 0];</code> */ boolean hasCreationTime(); /** * <code>optional int64 creation_time = 3 [default = 0];</code> */ long getCreationTime(); // optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH]; /** * <code>optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];</code> */ boolean hasType(); /** * <code>optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type getType(); // optional int32 version = 5; /** * <code>optional int32 version = 5;</code> */ boolean hasVersion(); /** * <code>optional int32 version = 5;</code> */ int getVersion(); // optional string owner = 6; /** * <code>optional 
string owner = 6;</code> */ boolean hasOwner(); /** * <code>optional string owner = 6;</code> */ java.lang.String getOwner(); /** * <code>optional string owner = 6;</code> */ com.google.protobuf.ByteString getOwnerBytes(); } /** * Protobuf type {@code hbase.pb.SnapshotDescription} * * <pre> ** * Description of the snapshot to take * </pre> */ public static final class SnapshotDescription extends com.google.protobuf.GeneratedMessage implements SnapshotDescriptionOrBuilder { // Use SnapshotDescription.newBuilder() to construct. private SnapshotDescription(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private SnapshotDescription(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final SnapshotDescription defaultInstance; public static SnapshotDescription getDefaultInstance() { return defaultInstance; } public SnapshotDescription getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private SnapshotDescription( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; name_ = input.readBytes(); break; } case 18: { bitField0_ |= 0x00000002; table_ = input.readBytes(); break; } case 24: { bitField0_ |= 0x00000004; creationTime_ = input.readInt64(); break; } case 32: { int rawValue = input.readEnum(); org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(4, rawValue); } else { bitField0_ |= 0x00000008; type_ = value; } break; } case 40: { bitField0_ |= 0x00000010; version_ = input.readInt32(); break; } case 50: { bitField0_ |= 0x00000020; owner_ = input.readBytes(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_SnapshotDescription_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_SnapshotDescription_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder.class); } public static com.google.protobuf.Parser<SnapshotDescription> 
PARSER = new com.google.protobuf.AbstractParser<SnapshotDescription>() { public SnapshotDescription parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new SnapshotDescription(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<SnapshotDescription> getParserForType() { return PARSER; } /** * Protobuf enum {@code hbase.pb.SnapshotDescription.Type} */ public enum Type implements com.google.protobuf.ProtocolMessageEnum { /** * <code>DISABLED = 0;</code> */ DISABLED(0, 0), /** * <code>FLUSH = 1;</code> */ FLUSH(1, 1), /** * <code>SKIPFLUSH = 2;</code> */ SKIPFLUSH(2, 2), ; /** * <code>DISABLED = 0;</code> */ public static final int DISABLED_VALUE = 0; /** * <code>FLUSH = 1;</code> */ public static final int FLUSH_VALUE = 1; /** * <code>SKIPFLUSH = 2;</code> */ public static final int SKIPFLUSH_VALUE = 2; public final int getNumber() { return value; } public static Type valueOf(int value) { switch (value) { case 0: return DISABLED; case 1: return FLUSH; case 2: return SKIPFLUSH; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<Type> internalGetValueMap() { return internalValueMap; } private static com.google.protobuf.Internal.EnumLiteMap<Type> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<Type>() { public Type findValueByNumber(int number) { return Type.valueOf(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(index); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDescriptor().getEnumTypes().get(0); } private static final Type[] VALUES = values(); public static Type valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } return VALUES[desc.getIndex()]; } private final int index; private final int value; private Type(int index, int value) { this.index = index; this.value = value; } // @@protoc_insertion_point(enum_scope:hbase.pb.SnapshotDescription.Type) } private int bitField0_; // required string name = 1; public static final int NAME_FIELD_NUMBER = 1; private java.lang.Object name_; /** * <code>required string name = 1;</code> */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { name_ = s; } return s; } } /** * <code>required string name = 1;</code> */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } // optional string table = 2; public static final int TABLE_FIELD_NUMBER = 2; private java.lang.Object 
table_; /** * <code>optional string table = 2;</code> * * <pre> * not needed for delete, but checked for in taking snapshot * </pre> */ public boolean hasTable() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional string table = 2;</code> * * <pre> * not needed for delete, but checked for in taking snapshot * </pre> */ public java.lang.String getTable() { java.lang.Object ref = table_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { table_ = s; } return s; } } /** * <code>optional string table = 2;</code> * * <pre> * not needed for delete, but checked for in taking snapshot * </pre> */ public com.google.protobuf.ByteString getTableBytes() { java.lang.Object ref = table_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); table_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } // optional int64 creation_time = 3 [default = 0]; public static final int CREATION_TIME_FIELD_NUMBER = 3; private long creationTime_; /** * <code>optional int64 creation_time = 3 [default = 0];</code> */ public boolean hasCreationTime() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional int64 creation_time = 3 [default = 0];</code> */ public long getCreationTime() { return creationTime_; } // optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH]; public static final int TYPE_FIELD_NUMBER = 4; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type type_; /** * <code>optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];</code> */ public boolean hasType() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type getType() { return type_; } // optional int32 version = 5; public static final int VERSION_FIELD_NUMBER = 5; private int version_; /** * <code>optional int32 version = 5;</code> */ public boolean hasVersion() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>optional int32 version = 5;</code> */ public int getVersion() { return version_; } // optional string owner = 6; public static final int OWNER_FIELD_NUMBER = 6; private java.lang.Object owner_; /** * <code>optional string owner = 6;</code> */ public boolean hasOwner() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <code>optional string owner = 6;</code> */ public java.lang.String getOwner() { java.lang.Object ref = owner_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { owner_ = s; } return s; } } /** * <code>optional string owner = 6;</code> */ public com.google.protobuf.ByteString getOwnerBytes() { java.lang.Object ref = owner_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); owner_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private void initFields() { name_ = ""; table_ = ""; creationTime_ = 0L; type_ = 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type.FLUSH; version_ = 0; owner_ = ""; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasName()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, getNameBytes()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, getTableBytes()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeInt64(3, creationTime_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeEnum(4, type_.getNumber()); } if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeInt32(5, version_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { output.writeBytes(6, getOwnerBytes()); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, getNameBytes()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, getTableBytes()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream .computeInt64Size(3, creationTime_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(4, type_.getNumber()); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += com.google.protobuf.CodedOutputStream .computeInt32Size(5, version_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(6, getOwnerBytes()); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription) obj; boolean result = true; result = result && (hasName() == other.hasName()); if (hasName()) { result = result && getName() .equals(other.getName()); } result = result && (hasTable() == other.hasTable()); if (hasTable()) { result = result && getTable() .equals(other.getTable()); } result = result && (hasCreationTime() == other.hasCreationTime()); if (hasCreationTime()) { result = result && (getCreationTime() == other.getCreationTime()); } result = result && (hasType() == other.hasType()); if (hasType()) { result = result && (getType() == other.getType()); } result = result && (hasVersion() == other.hasVersion()); if (hasVersion()) { result = result && (getVersion() == other.getVersion()); } result = result && (hasOwner() == other.hasOwner()); if (hasOwner()) { result = result && getOwner() .equals(other.getOwner()); } result = result && 
getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasName()) { hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); } if (hasTable()) { hash = (37 * hash) + TABLE_FIELD_NUMBER; hash = (53 * hash) + getTable().hashCode(); } if (hasCreationTime()) { hash = (37 * hash) + CREATION_TIME_FIELD_NUMBER; hash = (53 * hash) + hashLong(getCreationTime()); } if (hasType()) { hash = (37 * hash) + TYPE_FIELD_NUMBER; hash = (53 * hash) + hashEnum(getType()); } if (hasVersion()) { hash = (37 * hash) + VERSION_FIELD_NUMBER; hash = (53 * hash) + getVersion(); } if (hasOwner()) { hash = (37 * hash) + OWNER_FIELD_NUMBER; hash = (53 * hash) + getOwner().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } 
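    // Editor's note (not emitted by protoc): a minimal usage sketch showing how a
    // SnapshotDescription is typically built and round-tripped through its wire format.
    // The snapshot and table names below are hypothetical placeholders.
    private static SnapshotDescription exampleRoundTrip()
        throws com.google.protobuf.InvalidProtocolBufferException {
      SnapshotDescription snapshot = newBuilder()
          .setName("example-snapshot")                     // required; build() throws without it
          .setTable("example-table")                       // optional; checked when taking a snapshot
          .setCreationTime(java.lang.System.currentTimeMillis())
          .setType(Type.SKIPFLUSH)                         // defaults to FLUSH when unset
          .setVersion(2)
          .build();
      byte[] wire = snapshot.toByteArray();                // inherited GeneratedMessage serialization
      return SnapshotDescription.parseFrom(wire);          // delegates to PARSER.parseFrom(wire)
    }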
public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.SnapshotDescription} * * <pre> ** * Description of the snapshot to take * </pre> */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_SnapshotDescription_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_SnapshotDescription_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); name_ = ""; bitField0_ = (bitField0_ & ~0x00000001); table_ = ""; bitField0_ = (bitField0_ & ~0x00000002); creationTime_ = 0L; bitField0_ = (bitField0_ & ~0x00000004); type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type.FLUSH; bitField0_ = (bitField0_ & ~0x00000008); version_ = 0; bitField0_ = (bitField0_ & ~0x00000010); owner_ = ""; bitField0_ = (bitField0_ & ~0x00000020); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_SnapshotDescription_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.name_ = name_; 
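    // Editor's note (not emitted by protoc): the builder records which fields were explicitly
    // set in bitField0_; buildPartial() copies those presence bits into the message's own
    // bitField0_ (via to_bitField0_) so that hasName(), hasTable(), etc. report them after build().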
if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.table_ = table_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.creationTime_ = creationTime_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; } result.type_ = type_; if (((from_bitField0_ & 0x00000010) == 0x00000010)) { to_bitField0_ |= 0x00000010; } result.version_ = version_; if (((from_bitField0_ & 0x00000020) == 0x00000020)) { to_bitField0_ |= 0x00000020; } result.owner_ = owner_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription other) { if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) return this; if (other.hasName()) { bitField0_ |= 0x00000001; name_ = other.name_; onChanged(); } if (other.hasTable()) { bitField0_ |= 0x00000002; table_ = other.table_; onChanged(); } if (other.hasCreationTime()) { setCreationTime(other.getCreationTime()); } if (other.hasType()) { setType(other.getType()); } if (other.hasVersion()) { setVersion(other.getVersion()); } if (other.hasOwner()) { bitField0_ |= 0x00000020; owner_ = other.owner_; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasName()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required string name = 1; private java.lang.Object name_ = ""; /** * <code>required string name = 1;</code> */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string name = 1;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>required string name = 1;</code> */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>required string name = 1;</code> */ public Builder setName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; name_ = value; onChanged(); return this; } /** * <code>required string name = 1;</code> */ public Builder clearName() { bitField0_ = 
(bitField0_ & ~0x00000001); name_ = getDefaultInstance().getName(); onChanged(); return this; } /** * <code>required string name = 1;</code> */ public Builder setNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; name_ = value; onChanged(); return this; } // optional string table = 2; private java.lang.Object table_ = ""; /** * <code>optional string table = 2;</code> * * <pre> * not needed for delete, but checked for in taking snapshot * </pre> */ public boolean hasTable() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional string table = 2;</code> * * <pre> * not needed for delete, but checked for in taking snapshot * </pre> */ public java.lang.String getTable() { java.lang.Object ref = table_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); table_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>optional string table = 2;</code> * * <pre> * not needed for delete, but checked for in taking snapshot * </pre> */ public com.google.protobuf.ByteString getTableBytes() { java.lang.Object ref = table_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); table_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>optional string table = 2;</code> * * <pre> * not needed for delete, but checked for in taking snapshot * </pre> */ public Builder setTable( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; table_ = value; onChanged(); return this; } /** * <code>optional string table = 2;</code> * * <pre> * not needed for delete, but checked for in taking snapshot * </pre> */ public Builder clearTable() { bitField0_ = (bitField0_ & ~0x00000002); table_ = getDefaultInstance().getTable(); onChanged(); return this; } /** * <code>optional string table = 2;</code> * * <pre> * not needed for delete, but checked for in taking snapshot * </pre> */ public Builder setTableBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; table_ = value; onChanged(); return this; } // optional int64 creation_time = 3 [default = 0]; private long creationTime_ ; /** * <code>optional int64 creation_time = 3 [default = 0];</code> */ public boolean hasCreationTime() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional int64 creation_time = 3 [default = 0];</code> */ public long getCreationTime() { return creationTime_; } /** * <code>optional int64 creation_time = 3 [default = 0];</code> */ public Builder setCreationTime(long value) { bitField0_ |= 0x00000004; creationTime_ = value; onChanged(); return this; } /** * <code>optional int64 creation_time = 3 [default = 0];</code> */ public Builder clearCreationTime() { bitField0_ = (bitField0_ & ~0x00000004); creationTime_ = 0L; onChanged(); return this; } // optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH]; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type.FLUSH; /** * <code>optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];</code> */ public boolean hasType() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional 
.hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type getType() { return type_; } /** * <code>optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];</code> */ public Builder setType(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000008; type_ = value; onChanged(); return this; } /** * <code>optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];</code> */ public Builder clearType() { bitField0_ = (bitField0_ & ~0x00000008); type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type.FLUSH; onChanged(); return this; } // optional int32 version = 5; private int version_ ; /** * <code>optional int32 version = 5;</code> */ public boolean hasVersion() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>optional int32 version = 5;</code> */ public int getVersion() { return version_; } /** * <code>optional int32 version = 5;</code> */ public Builder setVersion(int value) { bitField0_ |= 0x00000010; version_ = value; onChanged(); return this; } /** * <code>optional int32 version = 5;</code> */ public Builder clearVersion() { bitField0_ = (bitField0_ & ~0x00000010); version_ = 0; onChanged(); return this; } // optional string owner = 6; private java.lang.Object owner_ = ""; /** * <code>optional string owner = 6;</code> */ public boolean hasOwner() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <code>optional string owner = 6;</code> */ public java.lang.String getOwner() { java.lang.Object ref = owner_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); owner_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>optional string owner = 6;</code> */ public com.google.protobuf.ByteString getOwnerBytes() { java.lang.Object ref = owner_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); owner_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>optional string owner = 6;</code> */ public Builder setOwner( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000020; owner_ = value; onChanged(); return this; } /** * <code>optional string owner = 6;</code> */ public Builder clearOwner() { bitField0_ = (bitField0_ & ~0x00000020); owner_ = getDefaultInstance().getOwner(); onChanged(); return this; } /** * <code>optional string owner = 6;</code> */ public Builder setOwnerBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000020; owner_ = value; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:hbase.pb.SnapshotDescription) } static { defaultInstance = new SnapshotDescription(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.SnapshotDescription) } public interface ProcedureDescriptionOrBuilder extends com.google.protobuf.MessageOrBuilder { // required string signature = 1; /** * <code>required string signature = 1;</code> * * <pre> * the unique signature of the procedure * </pre> */ boolean hasSignature(); /** * <code>required string signature = 1;</code> * * <pre> * the unique signature of the procedure * </pre> */ java.lang.String 
getSignature(); /** * <code>required string signature = 1;</code> * * <pre> * the unique signature of the procedure * </pre> */ com.google.protobuf.ByteString getSignatureBytes(); // optional string instance = 2; /** * <code>optional string instance = 2;</code> * * <pre> * the procedure instance name * </pre> */ boolean hasInstance(); /** * <code>optional string instance = 2;</code> * * <pre> * the procedure instance name * </pre> */ java.lang.String getInstance(); /** * <code>optional string instance = 2;</code> * * <pre> * the procedure instance name * </pre> */ com.google.protobuf.ByteString getInstanceBytes(); // optional int64 creation_time = 3 [default = 0]; /** * <code>optional int64 creation_time = 3 [default = 0];</code> */ boolean hasCreationTime(); /** * <code>optional int64 creation_time = 3 [default = 0];</code> */ long getCreationTime(); // repeated .hbase.pb.NameStringPair configuration = 4; /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList(); /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index); /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ int getConfigurationCount(); /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationOrBuilderList(); /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder( int index); } /** * Protobuf type {@code hbase.pb.ProcedureDescription} * * <pre> ** * Description of the distributed procedure to take * </pre> */ public static final class ProcedureDescription extends com.google.protobuf.GeneratedMessage implements ProcedureDescriptionOrBuilder { // Use ProcedureDescription.newBuilder() to construct. 
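  // Editor's note (not emitted by protoc): a hedged sketch of building a ProcedureDescription
  // with its repeated NameStringPair configuration. It assumes NameStringPair exposes
  // setName/setValue string setters for its name/value fields; the key and values below are
  // hypothetical placeholders.
  private static ProcedureDescription exampleProcedure() {
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair timeout =
        org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.newBuilder()
            .setName("timeout.millis")                     // hypothetical configuration key
            .setValue("60000")
            .build();
    return newBuilder()
        .setSignature("example-procedure")                 // required; build() throws without it
        .setInstance("instance-1")                         // optional instance name
        .setCreationTime(java.lang.System.currentTimeMillis())
        .addConfiguration(timeout)                         // repeated field; add as many as needed
        .build();                                          // buildPartial() would skip the required check
  }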
private ProcedureDescription(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private ProcedureDescription(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final ProcedureDescription defaultInstance; public static ProcedureDescription getDefaultInstance() { return defaultInstance; } public ProcedureDescription getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ProcedureDescription( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; signature_ = input.readBytes(); break; } case 18: { bitField0_ |= 0x00000002; instance_ = input.readBytes(); break; } case 24: { bitField0_ |= 0x00000004; creationTime_ = input.readInt64(); break; } case 34: { if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) { configuration_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair>(); mutable_bitField0_ |= 0x00000008; } configuration_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.PARSER, extensionRegistry)); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) { configuration_ = java.util.Collections.unmodifiableList(configuration_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ProcedureDescription_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ProcedureDescription_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder.class); } public static com.google.protobuf.Parser<ProcedureDescription> PARSER = new com.google.protobuf.AbstractParser<ProcedureDescription>() { public ProcedureDescription parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ProcedureDescription(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<ProcedureDescription> getParserForType() { return PARSER; } private int bitField0_; // required string signature = 1; 
public static final int SIGNATURE_FIELD_NUMBER = 1; private java.lang.Object signature_; /** * <code>required string signature = 1;</code> * * <pre> * the unique signature of the procedure * </pre> */ public boolean hasSignature() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string signature = 1;</code> * * <pre> * the unique signature of the procedure * </pre> */ public java.lang.String getSignature() { java.lang.Object ref = signature_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { signature_ = s; } return s; } } /** * <code>required string signature = 1;</code> * * <pre> * the unique signature of the procedure * </pre> */ public com.google.protobuf.ByteString getSignatureBytes() { java.lang.Object ref = signature_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); signature_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } // optional string instance = 2; public static final int INSTANCE_FIELD_NUMBER = 2; private java.lang.Object instance_; /** * <code>optional string instance = 2;</code> * * <pre> * the procedure instance name * </pre> */ public boolean hasInstance() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional string instance = 2;</code> * * <pre> * the procedure instance name * </pre> */ public java.lang.String getInstance() { java.lang.Object ref = instance_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { instance_ = s; } return s; } } /** * <code>optional string instance = 2;</code> * * <pre> * the procedure instance name * </pre> */ public com.google.protobuf.ByteString getInstanceBytes() { java.lang.Object ref = instance_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); instance_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } // optional int64 creation_time = 3 [default = 0]; public static final int CREATION_TIME_FIELD_NUMBER = 3; private long creationTime_; /** * <code>optional int64 creation_time = 3 [default = 0];</code> */ public boolean hasCreationTime() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional int64 creation_time = 3 [default = 0];</code> */ public long getCreationTime() { return creationTime_; } // repeated .hbase.pb.NameStringPair configuration = 4; public static final int CONFIGURATION_FIELD_NUMBER = 4; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> configuration_; /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList() { return configuration_; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationOrBuilderList() { return configuration_; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public int getConfigurationCount() { return configuration_.size(); } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index) { return configuration_.get(index); } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder( int index) { return configuration_.get(index); } private void initFields() { signature_ = ""; instance_ = ""; creationTime_ = 0L; configuration_ = java.util.Collections.emptyList(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasSignature()) { memoizedIsInitialized = 0; return false; } for (int i = 0; i < getConfigurationCount(); i++) { if (!getConfiguration(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, getSignatureBytes()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, getInstanceBytes()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeInt64(3, creationTime_); } for (int i = 0; i < configuration_.size(); i++) { output.writeMessage(4, configuration_.get(i)); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, getSignatureBytes()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, getInstanceBytes()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream .computeInt64Size(3, creationTime_); } for (int i = 0; i < configuration_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(4, configuration_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription) obj; boolean result = true; result = result && (hasSignature() == other.hasSignature()); if (hasSignature()) { result = result && getSignature() .equals(other.getSignature()); } result = result && (hasInstance() == other.hasInstance()); if (hasInstance()) { result = result && getInstance() .equals(other.getInstance()); } result = result && 
(hasCreationTime() == other.hasCreationTime()); if (hasCreationTime()) { result = result && (getCreationTime() == other.getCreationTime()); } result = result && getConfigurationList() .equals(other.getConfigurationList()); result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasSignature()) { hash = (37 * hash) + SIGNATURE_FIELD_NUMBER; hash = (53 * hash) + getSignature().hashCode(); } if (hasInstance()) { hash = (37 * hash) + INSTANCE_FIELD_NUMBER; hash = (53 * hash) + getInstance().hashCode(); } if (hasCreationTime()) { hash = (37 * hash) + CREATION_TIME_FIELD_NUMBER; hash = (53 * hash) + hashLong(getCreationTime()); } if (getConfigurationCount() > 0) { hash = (37 * hash) + CONFIGURATION_FIELD_NUMBER; hash = (53 * hash) + getConfigurationList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.ProcedureDescription} * * <pre> ** * Description of the distributed procedure to take * </pre> */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ProcedureDescription_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ProcedureDescription_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getConfigurationFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); signature_ = ""; bitField0_ = (bitField0_ & ~0x00000001); instance_ = ""; bitField0_ = (bitField0_ & ~0x00000002); creationTime_ = 0L; bitField0_ = (bitField0_ & ~0x00000004); if (configurationBuilder_ == null) { configuration_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000008); } else { configurationBuilder_.clear(); } return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_ProcedureDescription_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if 
(((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.signature_ = signature_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.instance_ = instance_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.creationTime_ = creationTime_; if (configurationBuilder_ == null) { if (((bitField0_ & 0x00000008) == 0x00000008)) { configuration_ = java.util.Collections.unmodifiableList(configuration_); bitField0_ = (bitField0_ & ~0x00000008); } result.configuration_ = configuration_; } else { result.configuration_ = configurationBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription other) { if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance()) return this; if (other.hasSignature()) { bitField0_ |= 0x00000001; signature_ = other.signature_; onChanged(); } if (other.hasInstance()) { bitField0_ |= 0x00000002; instance_ = other.instance_; onChanged(); } if (other.hasCreationTime()) { setCreationTime(other.getCreationTime()); } if (configurationBuilder_ == null) { if (!other.configuration_.isEmpty()) { if (configuration_.isEmpty()) { configuration_ = other.configuration_; bitField0_ = (bitField0_ & ~0x00000008); } else { ensureConfigurationIsMutable(); configuration_.addAll(other.configuration_); } onChanged(); } } else { if (!other.configuration_.isEmpty()) { if (configurationBuilder_.isEmpty()) { configurationBuilder_.dispose(); configurationBuilder_ = null; configuration_ = other.configuration_; bitField0_ = (bitField0_ & ~0x00000008); configurationBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
getConfigurationFieldBuilder() : null; } else { configurationBuilder_.addAllMessages(other.configuration_); } } } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasSignature()) { return false; } for (int i = 0; i < getConfigurationCount(); i++) { if (!getConfiguration(i).isInitialized()) { return false; } } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required string signature = 1; private java.lang.Object signature_ = ""; /** * <code>required string signature = 1;</code> * * <pre> * the unique signature of the procedure * </pre> */ public boolean hasSignature() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string signature = 1;</code> * * <pre> * the unique signature of the procedure * </pre> */ public java.lang.String getSignature() { java.lang.Object ref = signature_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); signature_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>required string signature = 1;</code> * * <pre> * the unique signature of the procedure * </pre> */ public com.google.protobuf.ByteString getSignatureBytes() { java.lang.Object ref = signature_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); signature_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>required string signature = 1;</code> * * <pre> * the unique signature of the procedure * </pre> */ public Builder setSignature( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; signature_ = value; onChanged(); return this; } /** * <code>required string signature = 1;</code> * * <pre> * the unique signature of the procedure * </pre> */ public Builder clearSignature() { bitField0_ = (bitField0_ & ~0x00000001); signature_ = getDefaultInstance().getSignature(); onChanged(); return this; } /** * <code>required string signature = 1;</code> * * <pre> * the unique signature of the procedure * </pre> */ public Builder setSignatureBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; signature_ = value; onChanged(); return this; } // optional string instance = 2; private java.lang.Object instance_ = ""; /** * <code>optional string instance = 2;</code> * * <pre> * the procedure instance name * </pre> */ public boolean hasInstance() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional string instance = 2;</code> * * <pre> * the procedure instance name * </pre> */ public java.lang.String getInstance() { java.lang.Object ref = instance_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); instance_ = s; return s; } 
else { return (java.lang.String) ref; } } /** * <code>optional string instance = 2;</code> * * <pre> * the procedure instance name * </pre> */ public com.google.protobuf.ByteString getInstanceBytes() { java.lang.Object ref = instance_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); instance_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>optional string instance = 2;</code> * * <pre> * the procedure instance name * </pre> */ public Builder setInstance( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; instance_ = value; onChanged(); return this; } /** * <code>optional string instance = 2;</code> * * <pre> * the procedure instance name * </pre> */ public Builder clearInstance() { bitField0_ = (bitField0_ & ~0x00000002); instance_ = getDefaultInstance().getInstance(); onChanged(); return this; } /** * <code>optional string instance = 2;</code> * * <pre> * the procedure instance name * </pre> */ public Builder setInstanceBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; instance_ = value; onChanged(); return this; } // optional int64 creation_time = 3 [default = 0]; private long creationTime_ ; /** * <code>optional int64 creation_time = 3 [default = 0];</code> */ public boolean hasCreationTime() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional int64 creation_time = 3 [default = 0];</code> */ public long getCreationTime() { return creationTime_; } /** * <code>optional int64 creation_time = 3 [default = 0];</code> */ public Builder setCreationTime(long value) { bitField0_ |= 0x00000004; creationTime_ = value; onChanged(); return this; } /** * <code>optional int64 creation_time = 3 [default = 0];</code> */ public Builder clearCreationTime() { bitField0_ = (bitField0_ & ~0x00000004); creationTime_ = 0L; onChanged(); return this; } // repeated .hbase.pb.NameStringPair configuration = 4; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> configuration_ = java.util.Collections.emptyList(); private void ensureConfigurationIsMutable() { if (!((bitField0_ & 0x00000008) == 0x00000008)) { configuration_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair>(configuration_); bitField0_ |= 0x00000008; } } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> configurationBuilder_; /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList() { if (configurationBuilder_ == null) { return java.util.Collections.unmodifiableList(configuration_); } else { return configurationBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public int getConfigurationCount() { if (configurationBuilder_ == null) { return configuration_.size(); } else { return configurationBuilder_.getCount(); } } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair 
getConfiguration(int index) { if (configurationBuilder_ == null) { return configuration_.get(index); } else { return configurationBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public Builder setConfiguration( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { if (configurationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureConfigurationIsMutable(); configuration_.set(index, value); onChanged(); } else { configurationBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public Builder setConfiguration( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); configuration_.set(index, builderForValue.build()); onChanged(); } else { configurationBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public Builder addConfiguration(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { if (configurationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureConfigurationIsMutable(); configuration_.add(value); onChanged(); } else { configurationBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public Builder addConfiguration( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { if (configurationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureConfigurationIsMutable(); configuration_.add(index, value); onChanged(); } else { configurationBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public Builder addConfiguration( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); configuration_.add(builderForValue.build()); onChanged(); } else { configurationBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public Builder addConfiguration( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); configuration_.add(index, builderForValue.build()); onChanged(); } else { configurationBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public Builder addAllConfiguration( java.lang.Iterable<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> values) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); super.addAll(values, configuration_); onChanged(); } else { configurationBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public Builder clearConfiguration() { if (configurationBuilder_ == null) { configuration_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); } else { configurationBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public Builder removeConfiguration(int index) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); configuration_.remove(index); onChanged(); } else { configurationBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder getConfigurationBuilder( int index) { return getConfigurationFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder( int index) { if (configurationBuilder_ == null) { return configuration_.get(index); } else { return configurationBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationOrBuilderList() { if (configurationBuilder_ != null) { return configurationBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(configuration_); } } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addConfigurationBuilder() { return getConfigurationFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()); } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addConfigurationBuilder( int index) { return getConfigurationFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()); } /** * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder> getConfigurationBuilderList() { return getConfigurationFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationFieldBuilder() { if (configurationBuilder_ == null) { configurationBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>( configuration_, ((bitField0_ & 0x00000008) == 0x00000008), 
getParentForChildren(), isClean()); configuration_ = null; } return configurationBuilder_; } // @@protoc_insertion_point(builder_scope:hbase.pb.ProcedureDescription) } static { defaultInstance = new ProcedureDescription(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.ProcedureDescription) } public interface EmptyMsgOrBuilder extends com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.EmptyMsg} */ public static final class EmptyMsg extends com.google.protobuf.GeneratedMessage implements EmptyMsgOrBuilder { // Use EmptyMsg.newBuilder() to construct. private EmptyMsg(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private EmptyMsg(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final EmptyMsg defaultInstance; public static EmptyMsg getDefaultInstance() { return defaultInstance; } public EmptyMsg getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private EmptyMsg( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_EmptyMsg_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_EmptyMsg_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg.Builder.class); } public static com.google.protobuf.Parser<EmptyMsg> PARSER = new com.google.protobuf.AbstractParser<EmptyMsg>() { public EmptyMsg parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new EmptyMsg(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<EmptyMsg> getParserForType() { return PARSER; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); 
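// Illustrative usage (not generated output): a minimal sketch of assembling a
// ProcedureDescription through the Builder accessors above. The signature,
// instance name, and configuration key/value below are hypothetical
// placeholders; build() will throw if the required signature is left unset.
//
//   HBaseProtos.ProcedureDescription desc =
//       HBaseProtos.ProcedureDescription.newBuilder()
//           .setSignature("online-snapshot")              // required string
//           .setInstance("snapshot-of-usertable")         // optional instance name
//           .setCreationTime(System.currentTimeMillis())  // optional int64
//           .addConfiguration(HBaseProtos.NameStringPair.newBuilder()
//               .setName("type")
//               .setValue("FLUSH"))                        // repeated NameStringPair
//           .build();
//   byte[] wire = desc.toByteArray();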
getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { 
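// Illustrative note (not generated output): EmptyMsg declares no fields, so an
// instance carries no data beyond unknown fields. For a void-style payload,
//   HBaseProtos.EmptyMsg.getDefaultInstance()
// and
//   HBaseProtos.EmptyMsg.newBuilder().build()
// both yield an equivalent empty message.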
return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.EmptyMsg} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsgOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_EmptyMsg_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_EmptyMsg_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_EmptyMsg_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg other) { if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } // @@protoc_insertion_point(builder_scope:hbase.pb.EmptyMsg) } static { defaultInstance = new EmptyMsg(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.EmptyMsg) } public interface LongMsgOrBuilder extends com.google.protobuf.MessageOrBuilder { // required int64 long_msg = 1; /** * <code>required int64 long_msg = 1;</code> */ boolean hasLongMsg(); /** * <code>required int64 long_msg = 1;</code> */ long getLongMsg(); } /** * Protobuf type {@code hbase.pb.LongMsg} */ public static final class LongMsg extends com.google.protobuf.GeneratedMessage implements LongMsgOrBuilder { // Use LongMsg.newBuilder() to construct. private LongMsg(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private LongMsg(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final LongMsg defaultInstance; public static LongMsg getDefaultInstance() { return defaultInstance; } public LongMsg getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private LongMsg( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { bitField0_ |= 0x00000001; longMsg_ = input.readInt64(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_LongMsg_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_LongMsg_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg.Builder.class); } public static com.google.protobuf.Parser<LongMsg> PARSER = new com.google.protobuf.AbstractParser<LongMsg>() { public LongMsg parsePartialFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new LongMsg(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<LongMsg> getParserForType() { return PARSER; } private int bitField0_; // required int64 long_msg = 1; public static final int LONG_MSG_FIELD_NUMBER = 1; private long longMsg_; /** * <code>required int64 long_msg = 1;</code> */ public boolean hasLongMsg() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required int64 long_msg = 1;</code> */ public long getLongMsg() { return longMsg_; } private void initFields() { longMsg_ = 0L; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasLongMsg()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeInt64(1, longMsg_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeInt64Size(1, longMsg_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg) obj; boolean result = true; result = result && (hasLongMsg() == other.hasLongMsg()); if (hasLongMsg()) { result = result && (getLongMsg() == other.getLongMsg()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasLongMsg()) { hash = (37 * hash) + LONG_MSG_FIELD_NUMBER; hash = (53 * hash) + hashLong(getLongMsg()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.LongMsg} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsgOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_LongMsg_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_LongMsg_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); longMsg_ = 0L; bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder 
clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_LongMsg_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.longMsg_ = longMsg_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg other) { if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg.getDefaultInstance()) return this; if (other.hasLongMsg()) { setLongMsg(other.getLongMsg()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasLongMsg()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required int64 long_msg = 1; private long longMsg_ ; /** * <code>required int64 long_msg = 1;</code> */ public boolean hasLongMsg() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required int64 long_msg = 1;</code> */ public long getLongMsg() { return longMsg_; } /** * <code>required int64 long_msg = 1;</code> */ public Builder setLongMsg(long value) { bitField0_ |= 0x00000001; longMsg_ = value; onChanged(); return this; } /** * <code>required int64 long_msg = 1;</code> */ public Builder clearLongMsg() { bitField0_ = (bitField0_ & ~0x00000001); longMsg_ = 0L; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:hbase.pb.LongMsg) } static { defaultInstance = new LongMsg(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.LongMsg) } public interface DoubleMsgOrBuilder extends com.google.protobuf.MessageOrBuilder { // required double double_msg = 1; /** * <code>required double double_msg = 1;</code> */ boolean hasDoubleMsg(); /** * <code>required double double_msg = 1;</code> */ 
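// Illustrative usage (not generated output): a minimal round trip for LongMsg,
// whose single required int64 field is set via the Builder shown above. The
// value 42L is an arbitrary placeholder; build() throws if long_msg is unset.
//
//   HBaseProtos.LongMsg msg =
//       HBaseProtos.LongMsg.newBuilder().setLongMsg(42L).build();
//   byte[] wire = msg.toByteArray();
//   long roundTripped = HBaseProtos.LongMsg.parseFrom(wire).getLongMsg();  // 42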
double getDoubleMsg(); } /** * Protobuf type {@code hbase.pb.DoubleMsg} */ public static final class DoubleMsg extends com.google.protobuf.GeneratedMessage implements DoubleMsgOrBuilder { // Use DoubleMsg.newBuilder() to construct. private DoubleMsg(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private DoubleMsg(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final DoubleMsg defaultInstance; public static DoubleMsg getDefaultInstance() { return defaultInstance; } public DoubleMsg getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private DoubleMsg( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 9: { bitField0_ |= 0x00000001; doubleMsg_ = input.readDouble(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_DoubleMsg_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_DoubleMsg_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.DoubleMsg.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.DoubleMsg.Builder.class); } public static com.google.protobuf.Parser<DoubleMsg> PARSER = new com.google.protobuf.AbstractParser<DoubleMsg>() { public DoubleMsg parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new DoubleMsg(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<DoubleMsg> getParserForType() { return PARSER; } private int bitField0_; // required double double_msg = 1; public static final int DOUBLE_MSG_FIELD_NUMBER = 1; private double doubleMsg_; /** * <code>required double double_msg = 1;</code> */ public boolean hasDoubleMsg() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required double double_msg = 1;</code> */ public double getDoubleMsg() { return doubleMsg_; } private void initFields() { doubleMsg_ = 0D; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasDoubleMsg()) { 
memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeDouble(1, doubleMsg_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeDoubleSize(1, doubleMsg_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.DoubleMsg)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.DoubleMsg other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.DoubleMsg) obj; boolean result = true; result = result && (hasDoubleMsg() == other.hasDoubleMsg()); if (hasDoubleMsg()) { result = result && (Double.doubleToLongBits(getDoubleMsg()) == Double.doubleToLongBits(other.getDoubleMsg())); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasDoubleMsg()) { hash = (37 * hash) + DOUBLE_MSG_FIELD_NUMBER; hash = (53 * hash) + hashLong( Double.doubleToLongBits(getDoubleMsg())); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.DoubleMsg parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.DoubleMsg parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.DoubleMsg parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.DoubleMsg parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.DoubleMsg parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.DoubleMsg parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.DoubleMsg parseDelimitedFrom(java.io.InputStream input) 
throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.DoubleMsg parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.DoubleMsg parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.DoubleMsg parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.DoubleMsg prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.DoubleMsg} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.DoubleMsgOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_DoubleMsg_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_DoubleMsg_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.DoubleMsg.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.DoubleMsg.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.DoubleMsg.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); doubleMsg_ = 0D; bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_DoubleMsg_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.DoubleMsg getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.DoubleMsg.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.DoubleMsg build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.DoubleMsg result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.DoubleMsg buildPartial() { 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.DoubleMsg result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.DoubleMsg(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.doubleMsg_ = doubleMsg_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.DoubleMsg) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.DoubleMsg)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.DoubleMsg other) { if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.DoubleMsg.getDefaultInstance()) return this; if (other.hasDoubleMsg()) { setDoubleMsg(other.getDoubleMsg()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasDoubleMsg()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.DoubleMsg parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.DoubleMsg) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required double double_msg = 1; private double doubleMsg_ ; /** * <code>required double double_msg = 1;</code> */ public boolean hasDoubleMsg() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required double double_msg = 1;</code> */ public double getDoubleMsg() { return doubleMsg_; } /** * <code>required double double_msg = 1;</code> */ public Builder setDoubleMsg(double value) { bitField0_ |= 0x00000001; doubleMsg_ = value; onChanged(); return this; } /** * <code>required double double_msg = 1;</code> */ public Builder clearDoubleMsg() { bitField0_ = (bitField0_ & ~0x00000001); doubleMsg_ = 0D; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:hbase.pb.DoubleMsg) } static { defaultInstance = new DoubleMsg(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.DoubleMsg) } public interface BigDecimalMsgOrBuilder extends com.google.protobuf.MessageOrBuilder { // required bytes bigdecimal_msg = 1; /** * <code>required bytes bigdecimal_msg = 1;</code> */ boolean hasBigdecimalMsg(); /** * <code>required bytes bigdecimal_msg = 1;</code> */ com.google.protobuf.ByteString getBigdecimalMsg(); } /** * Protobuf type {@code hbase.pb.BigDecimalMsg} */ public static final class BigDecimalMsg extends com.google.protobuf.GeneratedMessage implements BigDecimalMsgOrBuilder { // Use BigDecimalMsg.newBuilder() to construct. 
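// Illustrative usage (not generated output): DoubleMsg mirrors LongMsg with a
// single required double field. The literal below is a placeholder value.
//
//   HBaseProtos.DoubleMsg d =
//       HBaseProtos.DoubleMsg.newBuilder().setDoubleMsg(0.5d).build();
//   double back = HBaseProtos.DoubleMsg.parseFrom(d.toByteArray()).getDoubleMsg();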
private BigDecimalMsg(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private BigDecimalMsg(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final BigDecimalMsg defaultInstance; public static BigDecimalMsg getDefaultInstance() { return defaultInstance; } public BigDecimalMsg getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private BigDecimalMsg( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; bigdecimalMsg_ = input.readBytes(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_BigDecimalMsg_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_BigDecimalMsg_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg.Builder.class); } public static com.google.protobuf.Parser<BigDecimalMsg> PARSER = new com.google.protobuf.AbstractParser<BigDecimalMsg>() { public BigDecimalMsg parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new BigDecimalMsg(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<BigDecimalMsg> getParserForType() { return PARSER; } private int bitField0_; // required bytes bigdecimal_msg = 1; public static final int BIGDECIMAL_MSG_FIELD_NUMBER = 1; private com.google.protobuf.ByteString bigdecimalMsg_; /** * <code>required bytes bigdecimal_msg = 1;</code> */ public boolean hasBigdecimalMsg() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes bigdecimal_msg = 1;</code> */ public com.google.protobuf.ByteString getBigdecimalMsg() { return bigdecimalMsg_; } private void initFields() { bigdecimalMsg_ = com.google.protobuf.ByteString.EMPTY; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasBigdecimalMsg()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 
1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, bigdecimalMsg_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, bigdecimalMsg_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg) obj; boolean result = true; result = result && (hasBigdecimalMsg() == other.hasBigdecimalMsg()); if (hasBigdecimalMsg()) { result = result && getBigdecimalMsg() .equals(other.getBigdecimalMsg()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasBigdecimalMsg()) { hash = (37 * hash) + BIGDECIMAL_MSG_FIELD_NUMBER; hash = (53 * hash) + getBigdecimalMsg().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return 
PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.BigDecimalMsg} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsgOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_BigDecimalMsg_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_BigDecimalMsg_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); bigdecimalMsg_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_BigDecimalMsg_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.bigdecimalMsg_ = bigdecimalMsg_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg other) { if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg.getDefaultInstance()) return this; if (other.hasBigdecimalMsg()) { setBigdecimalMsg(other.getBigdecimalMsg()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasBigdecimalMsg()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required bytes bigdecimal_msg = 1; private com.google.protobuf.ByteString bigdecimalMsg_ = com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes bigdecimal_msg = 1;</code> */ public boolean hasBigdecimalMsg() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes bigdecimal_msg = 1;</code> */ public com.google.protobuf.ByteString getBigdecimalMsg() { return bigdecimalMsg_; } /** * <code>required bytes bigdecimal_msg = 1;</code> */ public Builder setBigdecimalMsg(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; bigdecimalMsg_ = value; onChanged(); return this; } /** * <code>required bytes bigdecimal_msg = 1;</code> */ public Builder clearBigdecimalMsg() { bitField0_ = (bitField0_ & ~0x00000001); bigdecimalMsg_ = getDefaultInstance().getBigdecimalMsg(); onChanged(); return this; } // @@protoc_insertion_point(builder_scope:hbase.pb.BigDecimalMsg) } static { defaultInstance = new BigDecimalMsg(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.BigDecimalMsg) } public interface UUIDOrBuilder extends com.google.protobuf.MessageOrBuilder { // required uint64 least_sig_bits = 1; /** * <code>required uint64 least_sig_bits = 1;</code> */ boolean hasLeastSigBits(); /** * <code>required uint64 least_sig_bits = 1;</code> */ long getLeastSigBits(); // required uint64 most_sig_bits = 2; /** * <code>required uint64 most_sig_bits = 2;</code> */ boolean hasMostSigBits(); /** * <code>required uint64 most_sig_bits = 2;</code> */ long getMostSigBits(); } /** * Protobuf type {@code hbase.pb.UUID} */ public static final class 
UUID extends com.google.protobuf.GeneratedMessage implements UUIDOrBuilder { // Use UUID.newBuilder() to construct. private UUID(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private UUID(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final UUID defaultInstance; public static UUID getDefaultInstance() { return defaultInstance; } public UUID getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private UUID( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { bitField0_ |= 0x00000001; leastSigBits_ = input.readUInt64(); break; } case 16: { bitField0_ |= 0x00000002; mostSigBits_ = input.readUInt64(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_UUID_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_UUID_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder.class); } public static com.google.protobuf.Parser<UUID> PARSER = new com.google.protobuf.AbstractParser<UUID>() { public UUID parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new UUID(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<UUID> getParserForType() { return PARSER; } private int bitField0_; // required uint64 least_sig_bits = 1; public static final int LEAST_SIG_BITS_FIELD_NUMBER = 1; private long leastSigBits_; /** * <code>required uint64 least_sig_bits = 1;</code> */ public boolean hasLeastSigBits() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required uint64 least_sig_bits = 1;</code> */ public long getLeastSigBits() { return leastSigBits_; } // required uint64 most_sig_bits = 2; public static final int MOST_SIG_BITS_FIELD_NUMBER = 2; private long mostSigBits_; /** * <code>required uint64 most_sig_bits = 2;</code> */ public boolean hasMostSigBits() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required uint64 most_sig_bits = 2;</code> */ public long 
getMostSigBits() { return mostSigBits_; } private void initFields() { leastSigBits_ = 0L; mostSigBits_ = 0L; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasLeastSigBits()) { memoizedIsInitialized = 0; return false; } if (!hasMostSigBits()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt64(1, leastSigBits_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeUInt64(2, mostSigBits_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(1, leastSigBits_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(2, mostSigBits_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID) obj; boolean result = true; result = result && (hasLeastSigBits() == other.hasLeastSigBits()); if (hasLeastSigBits()) { result = result && (getLeastSigBits() == other.getLeastSigBits()); } result = result && (hasMostSigBits() == other.hasMostSigBits()); if (hasMostSigBits()) { result = result && (getMostSigBits() == other.getMostSigBits()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasLeastSigBits()) { hash = (37 * hash) + LEAST_SIG_BITS_FIELD_NUMBER; hash = (53 * hash) + hashLong(getLeastSigBits()); } if (hasMostSigBits()) { hash = (37 * hash) + MOST_SIG_BITS_FIELD_NUMBER; hash = (53 * hash) + hashLong(getMostSigBits()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.UUID} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_UUID_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_UUID_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); leastSigBits_ = 0L; bitField0_ = (bitField0_ & ~0x00000001); mostSigBits_ = 0L; bitField0_ = (bitField0_ & ~0x00000002); return this; } 
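      /*
       * Illustrative sketch, not part of the generated code: converting between
       * java.util.UUID and this hbase.pb.UUID message. The helper names
       * toProtoUuid / fromProtoUuid are hypothetical; they simply map the two
       * 64-bit halves of a java.util.UUID onto the required most_sig_bits /
       * least_sig_bits fields declared above.
       *
       *   public static HBaseProtos.UUID toProtoUuid(java.util.UUID uuid) {
       *     // Both fields are required, so both setters must be called before build().
       *     return HBaseProtos.UUID.newBuilder()
       *         .setMostSigBits(uuid.getMostSignificantBits())
       *         .setLeastSigBits(uuid.getLeastSignificantBits())
       *         .build();
       *   }
       *
       *   public static java.util.UUID fromProtoUuid(HBaseProtos.UUID msg) {
       *     return new java.util.UUID(msg.getMostSigBits(), msg.getLeastSigBits());
       *   }
       */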
public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_UUID_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.leastSigBits_ = leastSigBits_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.mostSigBits_ = mostSigBits_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID other) { if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.getDefaultInstance()) return this; if (other.hasLeastSigBits()) { setLeastSigBits(other.getLeastSigBits()); } if (other.hasMostSigBits()) { setMostSigBits(other.getMostSigBits()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasLeastSigBits()) { return false; } if (!hasMostSigBits()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required uint64 least_sig_bits = 1; private long leastSigBits_ ; /** * <code>required uint64 least_sig_bits = 1;</code> */ public boolean hasLeastSigBits() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required uint64 least_sig_bits = 1;</code> */ public long getLeastSigBits() { return leastSigBits_; } /** * <code>required uint64 least_sig_bits = 1;</code> */ public Builder setLeastSigBits(long value) { bitField0_ |= 0x00000001; leastSigBits_ = value; onChanged(); return this; } /** * <code>required uint64 least_sig_bits = 1;</code> */ public Builder clearLeastSigBits() { bitField0_ = (bitField0_ & ~0x00000001); leastSigBits_ = 0L; onChanged(); return this; } // required uint64 most_sig_bits = 2; private long mostSigBits_ ; /** * <code>required uint64 most_sig_bits = 2;</code> */ public boolean 
hasMostSigBits() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required uint64 most_sig_bits = 2;</code> */ public long getMostSigBits() { return mostSigBits_; } /** * <code>required uint64 most_sig_bits = 2;</code> */ public Builder setMostSigBits(long value) { bitField0_ |= 0x00000002; mostSigBits_ = value; onChanged(); return this; } /** * <code>required uint64 most_sig_bits = 2;</code> */ public Builder clearMostSigBits() { bitField0_ = (bitField0_ & ~0x00000002); mostSigBits_ = 0L; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:hbase.pb.UUID) } static { defaultInstance = new UUID(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.UUID) } public interface NamespaceDescriptorOrBuilder extends com.google.protobuf.MessageOrBuilder { // required bytes name = 1; /** * <code>required bytes name = 1;</code> */ boolean hasName(); /** * <code>required bytes name = 1;</code> */ com.google.protobuf.ByteString getName(); // repeated .hbase.pb.NameStringPair configuration = 2; /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList(); /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index); /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ int getConfigurationCount(); /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationOrBuilderList(); /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder( int index); } /** * Protobuf type {@code hbase.pb.NamespaceDescriptor} */ public static final class NamespaceDescriptor extends com.google.protobuf.GeneratedMessage implements NamespaceDescriptorOrBuilder { // Use NamespaceDescriptor.newBuilder() to construct. 
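    /*
     * Illustrative sketch, not part of the generated code: building, serializing and
     * re-parsing a NamespaceDescriptor. The namespace name "demo_ns" and the
     * configuration key/value are made-up placeholder values, and NameStringPair is
     * assumed to expose the setName/setValue string setters it carries elsewhere in
     * this file.
     *
     *   HBaseProtos.NamespaceDescriptor ns = HBaseProtos.NamespaceDescriptor.newBuilder()
     *       .setName(com.google.protobuf.ByteString.copyFromUtf8("demo_ns"))  // required bytes name = 1
     *       .addConfiguration(HBaseProtos.NameStringPair.newBuilder()
     *           .setName("hbase.namespace.quota.maxtables")                   // placeholder config key
     *           .setValue("10"))
     *       .build();
     *
     *   byte[] wire = ns.toByteArray();                       // protobuf wire format
     *   HBaseProtos.NamespaceDescriptor parsed =
     *       HBaseProtos.NamespaceDescriptor.parseFrom(wire);  // uses the PARSER defined below
     */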
private NamespaceDescriptor(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private NamespaceDescriptor(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final NamespaceDescriptor defaultInstance; public static NamespaceDescriptor getDefaultInstance() { return defaultInstance; } public NamespaceDescriptor getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private NamespaceDescriptor( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; name_ = input.readBytes(); break; } case 18: { if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { configuration_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair>(); mutable_bitField0_ |= 0x00000002; } configuration_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.PARSER, extensionRegistry)); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { configuration_ = java.util.Collections.unmodifiableList(configuration_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NamespaceDescriptor_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NamespaceDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder.class); } public static com.google.protobuf.Parser<NamespaceDescriptor> PARSER = new com.google.protobuf.AbstractParser<NamespaceDescriptor>() { public NamespaceDescriptor parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new NamespaceDescriptor(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<NamespaceDescriptor> getParserForType() { return PARSER; } private int bitField0_; // required bytes name = 1; public static final int NAME_FIELD_NUMBER = 1; private com.google.protobuf.ByteString name_; /** * <code>required bytes name = 1;</code> */ public boolean hasName() { return ((bitField0_ 
& 0x00000001) == 0x00000001); } /** * <code>required bytes name = 1;</code> */ public com.google.protobuf.ByteString getName() { return name_; } // repeated .hbase.pb.NameStringPair configuration = 2; public static final int CONFIGURATION_FIELD_NUMBER = 2; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> configuration_; /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList() { return configuration_; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationOrBuilderList() { return configuration_; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public int getConfigurationCount() { return configuration_.size(); } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index) { return configuration_.get(index); } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder( int index) { return configuration_.get(index); } private void initFields() { name_ = com.google.protobuf.ByteString.EMPTY; configuration_ = java.util.Collections.emptyList(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasName()) { memoizedIsInitialized = 0; return false; } for (int i = 0; i < getConfigurationCount(); i++) { if (!getConfiguration(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, name_); } for (int i = 0; i < configuration_.size(); i++) { output.writeMessage(2, configuration_.get(i)); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, name_); } for (int i = 0; i < configuration_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, configuration_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor) obj; boolean result = true; result = result && (hasName() == other.hasName()); if (hasName()) { result = result && getName() .equals(other.getName()); } 
result = result && getConfigurationList() .equals(other.getConfigurationList()); result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasName()) { hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); } if (getConfigurationCount() > 0) { hash = (37 * hash) + CONFIGURATION_FIELD_NUMBER; hash = (53 * hash) + getConfigurationList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override 
protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.NamespaceDescriptor} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NamespaceDescriptor_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NamespaceDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getConfigurationFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); name_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); if (configurationBuilder_ == null) { configuration_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); } else { configurationBuilder_.clear(); } return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_NamespaceDescriptor_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.name_ = name_; if (configurationBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002)) { configuration_ = java.util.Collections.unmodifiableList(configuration_); bitField0_ = (bitField0_ & ~0x00000002); } result.configuration_ = configuration_; } else { result.configuration_ = configurationBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor other) { if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance()) return this; if (other.hasName()) { setName(other.getName()); } if (configurationBuilder_ == null) { if (!other.configuration_.isEmpty()) { if (configuration_.isEmpty()) { configuration_ = other.configuration_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureConfigurationIsMutable(); configuration_.addAll(other.configuration_); } onChanged(); } } else { if (!other.configuration_.isEmpty()) { if (configurationBuilder_.isEmpty()) { configurationBuilder_.dispose(); configurationBuilder_ = null; configuration_ = other.configuration_; bitField0_ = (bitField0_ & ~0x00000002); configurationBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? getConfigurationFieldBuilder() : null; } else { configurationBuilder_.addAllMessages(other.configuration_); } } } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasName()) { return false; } for (int i = 0; i < getConfigurationCount(); i++) { if (!getConfiguration(i).isInitialized()) { return false; } } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required bytes name = 1; private com.google.protobuf.ByteString name_ = com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes name = 1;</code> */ public boolean hasName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes name = 1;</code> */ public com.google.protobuf.ByteString getName() { return name_; } /** * <code>required bytes name = 1;</code> */ public Builder setName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; name_ = value; onChanged(); return this; } /** * <code>required bytes name = 1;</code> */ public Builder clearName() { bitField0_ = (bitField0_ & ~0x00000001); name_ = getDefaultInstance().getName(); onChanged(); return this; } // repeated .hbase.pb.NameStringPair configuration = 2; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> configuration_ = java.util.Collections.emptyList(); private void ensureConfigurationIsMutable() { if (!((bitField0_ & 0x00000002) == 0x00000002)) { configuration_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair>(configuration_); bitField0_ |= 0x00000002; } } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> configurationBuilder_; /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList() { if (configurationBuilder_ == null) { return java.util.Collections.unmodifiableList(configuration_); } else { return configurationBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public int getConfigurationCount() { if (configurationBuilder_ == null) { return configuration_.size(); } else { return configurationBuilder_.getCount(); } } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index) { if (configurationBuilder_ == null) { return configuration_.get(index); } else { return configurationBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public Builder setConfiguration( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { if (configurationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureConfigurationIsMutable(); configuration_.set(index, value); onChanged(); } else { configurationBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public Builder setConfiguration( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); configuration_.set(index, builderForValue.build()); onChanged(); } else { configurationBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public Builder addConfiguration(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { if (configurationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureConfigurationIsMutable(); configuration_.add(value); onChanged(); } else { configurationBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public Builder addConfiguration( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { if (configurationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureConfigurationIsMutable(); configuration_.add(index, value); onChanged(); } else { configurationBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public Builder addConfiguration( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); configuration_.add(builderForValue.build()); onChanged(); } else { configurationBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public Builder addConfiguration( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); 
configuration_.add(index, builderForValue.build()); onChanged(); } else { configurationBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public Builder addAllConfiguration( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> values) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); super.addAll(values, configuration_); onChanged(); } else { configurationBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public Builder clearConfiguration() { if (configurationBuilder_ == null) { configuration_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { configurationBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public Builder removeConfiguration(int index) { if (configurationBuilder_ == null) { ensureConfigurationIsMutable(); configuration_.remove(index); onChanged(); } else { configurationBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder getConfigurationBuilder( int index) { return getConfigurationFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder( int index) { if (configurationBuilder_ == null) { return configuration_.get(index); } else { return configurationBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationOrBuilderList() { if (configurationBuilder_ != null) { return configurationBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(configuration_); } } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addConfigurationBuilder() { return getConfigurationFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()); } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addConfigurationBuilder( int index) { return getConfigurationFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()); } /** * <code>repeated .hbase.pb.NameStringPair configuration = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder> getConfigurationBuilderList() { return getConfigurationFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationFieldBuilder() { if (configurationBuilder_ == null) { configurationBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>( configuration_, ((bitField0_ & 0x00000002) == 0x00000002), getParentForChildren(), isClean()); configuration_ = null; } return configurationBuilder_; } // @@protoc_insertion_point(builder_scope:hbase.pb.NamespaceDescriptor) } static { defaultInstance = new NamespaceDescriptor(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.NamespaceDescriptor) } public interface VersionInfoOrBuilder extends com.google.protobuf.MessageOrBuilder { // required string version = 1; /** * <code>required string version = 1;</code> */ boolean hasVersion(); /** * <code>required string version = 1;</code> */ java.lang.String getVersion(); /** * <code>required string version = 1;</code> */ com.google.protobuf.ByteString getVersionBytes(); // required string url = 2; /** * <code>required string url = 2;</code> */ boolean hasUrl(); /** * <code>required string url = 2;</code> */ java.lang.String getUrl(); /** * <code>required string url = 2;</code> */ com.google.protobuf.ByteString getUrlBytes(); // required string revision = 3; /** * <code>required string revision = 3;</code> */ boolean hasRevision(); /** * <code>required string revision = 3;</code> */ java.lang.String getRevision(); /** * <code>required string revision = 3;</code> */ com.google.protobuf.ByteString getRevisionBytes(); // required string user = 4; /** * <code>required string user = 4;</code> */ boolean hasUser(); /** * <code>required string user = 4;</code> */ java.lang.String getUser(); /** * <code>required string user = 4;</code> */ com.google.protobuf.ByteString getUserBytes(); // required string date = 5; /** * <code>required string date = 5;</code> */ 
boolean hasDate(); /** * <code>required string date = 5;</code> */ java.lang.String getDate(); /** * <code>required string date = 5;</code> */ com.google.protobuf.ByteString getDateBytes(); // required string src_checksum = 6; /** * <code>required string src_checksum = 6;</code> */ boolean hasSrcChecksum(); /** * <code>required string src_checksum = 6;</code> */ java.lang.String getSrcChecksum(); /** * <code>required string src_checksum = 6;</code> */ com.google.protobuf.ByteString getSrcChecksumBytes(); // optional uint32 version_major = 7; /** * <code>optional uint32 version_major = 7;</code> */ boolean hasVersionMajor(); /** * <code>optional uint32 version_major = 7;</code> */ int getVersionMajor(); // optional uint32 version_minor = 8; /** * <code>optional uint32 version_minor = 8;</code> */ boolean hasVersionMinor(); /** * <code>optional uint32 version_minor = 8;</code> */ int getVersionMinor(); } /** * Protobuf type {@code hbase.pb.VersionInfo} * * <pre> * Rpc client version info proto. Included in ConnectionHeader on connection setup * </pre> */ public static final class VersionInfo extends com.google.protobuf.GeneratedMessage implements VersionInfoOrBuilder { // Use VersionInfo.newBuilder() to construct. private VersionInfo(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private VersionInfo(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final VersionInfo defaultInstance; public static VersionInfo getDefaultInstance() { return defaultInstance; } public VersionInfo getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private VersionInfo( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; version_ = input.readBytes(); break; } case 18: { bitField0_ |= 0x00000002; url_ = input.readBytes(); break; } case 26: { bitField0_ |= 0x00000004; revision_ = input.readBytes(); break; } case 34: { bitField0_ |= 0x00000008; user_ = input.readBytes(); break; } case 42: { bitField0_ |= 0x00000010; date_ = input.readBytes(); break; } case 50: { bitField0_ |= 0x00000020; srcChecksum_ = input.readBytes(); break; } case 56: { bitField0_ |= 0x00000040; versionMajor_ = input.readUInt32(); break; } case 64: { bitField0_ |= 0x00000080; versionMinor_ = input.readUInt32(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_VersionInfo_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_VersionInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo.Builder.class); } public static com.google.protobuf.Parser<VersionInfo> PARSER = new com.google.protobuf.AbstractParser<VersionInfo>() { public VersionInfo parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new VersionInfo(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<VersionInfo> getParserForType() { return PARSER; } private int bitField0_; // required string version = 1; public static final int VERSION_FIELD_NUMBER = 1; private java.lang.Object version_; /** * <code>required string version = 1;</code> */ public boolean hasVersion() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string version = 1;</code> */ public java.lang.String getVersion() { java.lang.Object ref = version_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { version_ = s; } return s; } } /** * <code>required string version = 1;</code> */ public com.google.protobuf.ByteString getVersionBytes() { java.lang.Object ref = version_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); version_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } // required string url = 2; public static final int URL_FIELD_NUMBER = 2; private java.lang.Object url_; /** * <code>required string url = 2;</code> */ public boolean hasUrl() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required string url = 2;</code> */ public java.lang.String getUrl() { java.lang.Object ref = url_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { url_ = s; } return s; } } /** * <code>required string url = 2;</code> */ public com.google.protobuf.ByteString getUrlBytes() { java.lang.Object ref = url_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); url_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } // required string revision = 3; public static final int REVISION_FIELD_NUMBER = 3; private java.lang.Object revision_; /** * <code>required string revision = 3;</code> */ public boolean hasRevision() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>required string revision = 3;</code> */ public java.lang.String getRevision() { java.lang.Object ref = revision_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { revision_ 
= s; } return s; } } /** * <code>required string revision = 3;</code> */ public com.google.protobuf.ByteString getRevisionBytes() { java.lang.Object ref = revision_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); revision_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } // required string user = 4; public static final int USER_FIELD_NUMBER = 4; private java.lang.Object user_; /** * <code>required string user = 4;</code> */ public boolean hasUser() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>required string user = 4;</code> */ public java.lang.String getUser() { java.lang.Object ref = user_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { user_ = s; } return s; } } /** * <code>required string user = 4;</code> */ public com.google.protobuf.ByteString getUserBytes() { java.lang.Object ref = user_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); user_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } // required string date = 5; public static final int DATE_FIELD_NUMBER = 5; private java.lang.Object date_; /** * <code>required string date = 5;</code> */ public boolean hasDate() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>required string date = 5;</code> */ public java.lang.String getDate() { java.lang.Object ref = date_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { date_ = s; } return s; } } /** * <code>required string date = 5;</code> */ public com.google.protobuf.ByteString getDateBytes() { java.lang.Object ref = date_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); date_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } // required string src_checksum = 6; public static final int SRC_CHECKSUM_FIELD_NUMBER = 6; private java.lang.Object srcChecksum_; /** * <code>required string src_checksum = 6;</code> */ public boolean hasSrcChecksum() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <code>required string src_checksum = 6;</code> */ public java.lang.String getSrcChecksum() { java.lang.Object ref = srcChecksum_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { srcChecksum_ = s; } return s; } } /** * <code>required string src_checksum = 6;</code> */ public com.google.protobuf.ByteString getSrcChecksumBytes() { java.lang.Object ref = srcChecksum_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); srcChecksum_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } // optional uint32 version_major = 7; public static final int VERSION_MAJOR_FIELD_NUMBER = 7; private int versionMajor_; /** * <code>optional uint32 version_major = 7;</code> */ public boolean hasVersionMajor() { 
return ((bitField0_ & 0x00000040) == 0x00000040); } /** * <code>optional uint32 version_major = 7;</code> */ public int getVersionMajor() { return versionMajor_; } // optional uint32 version_minor = 8; public static final int VERSION_MINOR_FIELD_NUMBER = 8; private int versionMinor_; /** * <code>optional uint32 version_minor = 8;</code> */ public boolean hasVersionMinor() { return ((bitField0_ & 0x00000080) == 0x00000080); } /** * <code>optional uint32 version_minor = 8;</code> */ public int getVersionMinor() { return versionMinor_; } private void initFields() { version_ = ""; url_ = ""; revision_ = ""; user_ = ""; date_ = ""; srcChecksum_ = ""; versionMajor_ = 0; versionMinor_ = 0; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasVersion()) { memoizedIsInitialized = 0; return false; } if (!hasUrl()) { memoizedIsInitialized = 0; return false; } if (!hasRevision()) { memoizedIsInitialized = 0; return false; } if (!hasUser()) { memoizedIsInitialized = 0; return false; } if (!hasDate()) { memoizedIsInitialized = 0; return false; } if (!hasSrcChecksum()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, getVersionBytes()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, getUrlBytes()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBytes(3, getRevisionBytes()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeBytes(4, getUserBytes()); } if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeBytes(5, getDateBytes()); } if (((bitField0_ & 0x00000020) == 0x00000020)) { output.writeBytes(6, getSrcChecksumBytes()); } if (((bitField0_ & 0x00000040) == 0x00000040)) { output.writeUInt32(7, versionMajor_); } if (((bitField0_ & 0x00000080) == 0x00000080)) { output.writeUInt32(8, versionMinor_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, getVersionBytes()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, getUrlBytes()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(3, getRevisionBytes()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(4, getUserBytes()); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(5, getDateBytes()); } if (((bitField0_ & 0x00000020) == 0x00000020)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(6, getSrcChecksumBytes()); } if (((bitField0_ & 0x00000040) == 0x00000040)) { size += com.google.protobuf.CodedOutputStream .computeUInt32Size(7, versionMajor_); } if (((bitField0_ & 0x00000080) == 0x00000080)) { size += com.google.protobuf.CodedOutputStream .computeUInt32Size(8, versionMinor_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; 
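  /*
   * Usage sketch (all literal values below are hypothetical): a VersionInfo is
   * normally assembled through its Builder and serialized with toByteArray() or
   * writeTo(); parseFrom() reverses the process. The six required string fields
   * (version, url, revision, user, date, src_checksum) must all be set before
   * build() will succeed; the two uint32 fields are optional.
   *
   *   HBaseProtos.VersionInfo info = HBaseProtos.VersionInfo.newBuilder()
   *       .setVersion("1.2.0")                     // required string version = 1
   *       .setUrl("git://example.org/hbase")       // required string url = 2
   *       .setRevision("deadbeef")                 // required string revision = 3
   *       .setUser("builder")                      // required string user = 4
   *       .setDate("Thu Jan  1 00:00:00 UTC 1970") // required string date = 5
   *       .setSrcChecksum("0")                     // required string src_checksum = 6
   *       .setVersionMajor(1)                      // optional uint32 version_major = 7
   *       .setVersionMinor(2)                      // optional uint32 version_minor = 8
   *       .build();
   *   byte[] wire = info.toByteArray();
   *   HBaseProtos.VersionInfo parsed = HBaseProtos.VersionInfo.parseFrom(wire);
   */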
@java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo) obj; boolean result = true; result = result && (hasVersion() == other.hasVersion()); if (hasVersion()) { result = result && getVersion() .equals(other.getVersion()); } result = result && (hasUrl() == other.hasUrl()); if (hasUrl()) { result = result && getUrl() .equals(other.getUrl()); } result = result && (hasRevision() == other.hasRevision()); if (hasRevision()) { result = result && getRevision() .equals(other.getRevision()); } result = result && (hasUser() == other.hasUser()); if (hasUser()) { result = result && getUser() .equals(other.getUser()); } result = result && (hasDate() == other.hasDate()); if (hasDate()) { result = result && getDate() .equals(other.getDate()); } result = result && (hasSrcChecksum() == other.hasSrcChecksum()); if (hasSrcChecksum()) { result = result && getSrcChecksum() .equals(other.getSrcChecksum()); } result = result && (hasVersionMajor() == other.hasVersionMajor()); if (hasVersionMajor()) { result = result && (getVersionMajor() == other.getVersionMajor()); } result = result && (hasVersionMinor() == other.hasVersionMinor()); if (hasVersionMinor()) { result = result && (getVersionMinor() == other.getVersionMinor()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasVersion()) { hash = (37 * hash) + VERSION_FIELD_NUMBER; hash = (53 * hash) + getVersion().hashCode(); } if (hasUrl()) { hash = (37 * hash) + URL_FIELD_NUMBER; hash = (53 * hash) + getUrl().hashCode(); } if (hasRevision()) { hash = (37 * hash) + REVISION_FIELD_NUMBER; hash = (53 * hash) + getRevision().hashCode(); } if (hasUser()) { hash = (37 * hash) + USER_FIELD_NUMBER; hash = (53 * hash) + getUser().hashCode(); } if (hasDate()) { hash = (37 * hash) + DATE_FIELD_NUMBER; hash = (53 * hash) + getDate().hashCode(); } if (hasSrcChecksum()) { hash = (37 * hash) + SRC_CHECKSUM_FIELD_NUMBER; hash = (53 * hash) + getSrcChecksum().hashCode(); } if (hasVersionMajor()) { hash = (37 * hash) + VERSION_MAJOR_FIELD_NUMBER; hash = (53 * hash) + getVersionMajor(); } if (hasVersionMinor()) { hash = (37 * hash) + VERSION_MINOR_FIELD_NUMBER; hash = (53 * hash) + getVersionMinor(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo 
parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.VersionInfo} * * <pre> * Rpc client version info proto. 
Included in ConnectionHeader on connection setup * </pre> */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfoOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_VersionInfo_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_VersionInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); version_ = ""; bitField0_ = (bitField0_ & ~0x00000001); url_ = ""; bitField0_ = (bitField0_ & ~0x00000002); revision_ = ""; bitField0_ = (bitField0_ & ~0x00000004); user_ = ""; bitField0_ = (bitField0_ & ~0x00000008); date_ = ""; bitField0_ = (bitField0_ & ~0x00000010); srcChecksum_ = ""; bitField0_ = (bitField0_ & ~0x00000020); versionMajor_ = 0; bitField0_ = (bitField0_ & ~0x00000040); versionMinor_ = 0; bitField0_ = (bitField0_ & ~0x00000080); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_VersionInfo_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.version_ = version_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.url_ = url_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.revision_ = revision_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; } result.user_ = user_; if (((from_bitField0_ & 0x00000010) == 0x00000010)) { to_bitField0_ |= 0x00000010; } result.date_ = date_; if (((from_bitField0_ & 0x00000020) == 0x00000020)) { to_bitField0_ |= 0x00000020; } result.srcChecksum_ = srcChecksum_; if (((from_bitField0_ & 
0x00000040) == 0x00000040)) { to_bitField0_ |= 0x00000040; } result.versionMajor_ = versionMajor_; if (((from_bitField0_ & 0x00000080) == 0x00000080)) { to_bitField0_ |= 0x00000080; } result.versionMinor_ = versionMinor_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo other) { if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance()) return this; if (other.hasVersion()) { bitField0_ |= 0x00000001; version_ = other.version_; onChanged(); } if (other.hasUrl()) { bitField0_ |= 0x00000002; url_ = other.url_; onChanged(); } if (other.hasRevision()) { bitField0_ |= 0x00000004; revision_ = other.revision_; onChanged(); } if (other.hasUser()) { bitField0_ |= 0x00000008; user_ = other.user_; onChanged(); } if (other.hasDate()) { bitField0_ |= 0x00000010; date_ = other.date_; onChanged(); } if (other.hasSrcChecksum()) { bitField0_ |= 0x00000020; srcChecksum_ = other.srcChecksum_; onChanged(); } if (other.hasVersionMajor()) { setVersionMajor(other.getVersionMajor()); } if (other.hasVersionMinor()) { setVersionMinor(other.getVersionMinor()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasVersion()) { return false; } if (!hasUrl()) { return false; } if (!hasRevision()) { return false; } if (!hasUser()) { return false; } if (!hasDate()) { return false; } if (!hasSrcChecksum()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required string version = 1; private java.lang.Object version_ = ""; /** * <code>required string version = 1;</code> */ public boolean hasVersion() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string version = 1;</code> */ public java.lang.String getVersion() { java.lang.Object ref = version_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); version_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>required string version = 1;</code> */ public com.google.protobuf.ByteString getVersionBytes() { java.lang.Object ref = version_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); version_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>required string version = 1;</code> */ public Builder setVersion( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; version_ = value; onChanged(); return this; 
} /** * <code>required string version = 1;</code> */ public Builder clearVersion() { bitField0_ = (bitField0_ & ~0x00000001); version_ = getDefaultInstance().getVersion(); onChanged(); return this; } /** * <code>required string version = 1;</code> */ public Builder setVersionBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; version_ = value; onChanged(); return this; } // required string url = 2; private java.lang.Object url_ = ""; /** * <code>required string url = 2;</code> */ public boolean hasUrl() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required string url = 2;</code> */ public java.lang.String getUrl() { java.lang.Object ref = url_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); url_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>required string url = 2;</code> */ public com.google.protobuf.ByteString getUrlBytes() { java.lang.Object ref = url_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); url_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>required string url = 2;</code> */ public Builder setUrl( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; url_ = value; onChanged(); return this; } /** * <code>required string url = 2;</code> */ public Builder clearUrl() { bitField0_ = (bitField0_ & ~0x00000002); url_ = getDefaultInstance().getUrl(); onChanged(); return this; } /** * <code>required string url = 2;</code> */ public Builder setUrlBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; url_ = value; onChanged(); return this; } // required string revision = 3; private java.lang.Object revision_ = ""; /** * <code>required string revision = 3;</code> */ public boolean hasRevision() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>required string revision = 3;</code> */ public java.lang.String getRevision() { java.lang.Object ref = revision_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); revision_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>required string revision = 3;</code> */ public com.google.protobuf.ByteString getRevisionBytes() { java.lang.Object ref = revision_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); revision_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>required string revision = 3;</code> */ public Builder setRevision( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; revision_ = value; onChanged(); return this; } /** * <code>required string revision = 3;</code> */ public Builder clearRevision() { bitField0_ = (bitField0_ & ~0x00000004); revision_ = getDefaultInstance().getRevision(); onChanged(); return this; } /** * <code>required string revision = 3;</code> */ public Builder setRevisionBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; revision_ = value; onChanged(); return this; } // required string user = 4; private java.lang.Object 
user_ = ""; /** * <code>required string user = 4;</code> */ public boolean hasUser() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>required string user = 4;</code> */ public java.lang.String getUser() { java.lang.Object ref = user_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); user_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>required string user = 4;</code> */ public com.google.protobuf.ByteString getUserBytes() { java.lang.Object ref = user_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); user_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>required string user = 4;</code> */ public Builder setUser( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000008; user_ = value; onChanged(); return this; } /** * <code>required string user = 4;</code> */ public Builder clearUser() { bitField0_ = (bitField0_ & ~0x00000008); user_ = getDefaultInstance().getUser(); onChanged(); return this; } /** * <code>required string user = 4;</code> */ public Builder setUserBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000008; user_ = value; onChanged(); return this; } // required string date = 5; private java.lang.Object date_ = ""; /** * <code>required string date = 5;</code> */ public boolean hasDate() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>required string date = 5;</code> */ public java.lang.String getDate() { java.lang.Object ref = date_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); date_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>required string date = 5;</code> */ public com.google.protobuf.ByteString getDateBytes() { java.lang.Object ref = date_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); date_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>required string date = 5;</code> */ public Builder setDate( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000010; date_ = value; onChanged(); return this; } /** * <code>required string date = 5;</code> */ public Builder clearDate() { bitField0_ = (bitField0_ & ~0x00000010); date_ = getDefaultInstance().getDate(); onChanged(); return this; } /** * <code>required string date = 5;</code> */ public Builder setDateBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000010; date_ = value; onChanged(); return this; } // required string src_checksum = 6; private java.lang.Object srcChecksum_ = ""; /** * <code>required string src_checksum = 6;</code> */ public boolean hasSrcChecksum() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <code>required string src_checksum = 6;</code> */ public java.lang.String getSrcChecksum() { java.lang.Object ref = srcChecksum_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); srcChecksum_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>required string src_checksum = 6;</code> */ public 
com.google.protobuf.ByteString getSrcChecksumBytes() { java.lang.Object ref = srcChecksum_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); srcChecksum_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>required string src_checksum = 6;</code> */ public Builder setSrcChecksum( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000020; srcChecksum_ = value; onChanged(); return this; } /** * <code>required string src_checksum = 6;</code> */ public Builder clearSrcChecksum() { bitField0_ = (bitField0_ & ~0x00000020); srcChecksum_ = getDefaultInstance().getSrcChecksum(); onChanged(); return this; } /** * <code>required string src_checksum = 6;</code> */ public Builder setSrcChecksumBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000020; srcChecksum_ = value; onChanged(); return this; } // optional uint32 version_major = 7; private int versionMajor_ ; /** * <code>optional uint32 version_major = 7;</code> */ public boolean hasVersionMajor() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** * <code>optional uint32 version_major = 7;</code> */ public int getVersionMajor() { return versionMajor_; } /** * <code>optional uint32 version_major = 7;</code> */ public Builder setVersionMajor(int value) { bitField0_ |= 0x00000040; versionMajor_ = value; onChanged(); return this; } /** * <code>optional uint32 version_major = 7;</code> */ public Builder clearVersionMajor() { bitField0_ = (bitField0_ & ~0x00000040); versionMajor_ = 0; onChanged(); return this; } // optional uint32 version_minor = 8; private int versionMinor_ ; /** * <code>optional uint32 version_minor = 8;</code> */ public boolean hasVersionMinor() { return ((bitField0_ & 0x00000080) == 0x00000080); } /** * <code>optional uint32 version_minor = 8;</code> */ public int getVersionMinor() { return versionMinor_; } /** * <code>optional uint32 version_minor = 8;</code> */ public Builder setVersionMinor(int value) { bitField0_ |= 0x00000080; versionMinor_ = value; onChanged(); return this; } /** * <code>optional uint32 version_minor = 8;</code> */ public Builder clearVersionMinor() { bitField0_ = (bitField0_ & ~0x00000080); versionMinor_ = 0; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:hbase.pb.VersionInfo) } static { defaultInstance = new VersionInfo(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.VersionInfo) } public interface RegionServerInfoOrBuilder extends com.google.protobuf.MessageOrBuilder { // optional int32 infoPort = 1; /** * <code>optional int32 infoPort = 1;</code> */ boolean hasInfoPort(); /** * <code>optional int32 infoPort = 1;</code> */ int getInfoPort(); // optional .hbase.pb.VersionInfo version_info = 2; /** * <code>optional .hbase.pb.VersionInfo version_info = 2;</code> */ boolean hasVersionInfo(); /** * <code>optional .hbase.pb.VersionInfo version_info = 2;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo getVersionInfo(); /** * <code>optional .hbase.pb.VersionInfo version_info = 2;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfoOrBuilder getVersionInfoOrBuilder(); } /** * Protobuf type {@code hbase.pb.RegionServerInfo} * * <pre> ** * Description of the region server info * </pre> */ public static final class RegionServerInfo extends 
com.google.protobuf.GeneratedMessage implements RegionServerInfoOrBuilder { // Use RegionServerInfo.newBuilder() to construct. private RegionServerInfo(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private RegionServerInfo(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final RegionServerInfo defaultInstance; public static RegionServerInfo getDefaultInstance() { return defaultInstance; } public RegionServerInfo getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private RegionServerInfo( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { bitField0_ |= 0x00000001; infoPort_ = input.readInt32(); break; } case 18: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo.Builder subBuilder = null; if (((bitField0_ & 0x00000002) == 0x00000002)) { subBuilder = versionInfo_.toBuilder(); } versionInfo_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(versionInfo_); versionInfo_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionServerInfo_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionServerInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo.Builder.class); } public static com.google.protobuf.Parser<RegionServerInfo> PARSER = new com.google.protobuf.AbstractParser<RegionServerInfo>() { public RegionServerInfo parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new RegionServerInfo(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<RegionServerInfo> getParserForType() { return PARSER; } private int bitField0_; // optional int32 infoPort = 1; public static final int INFOPORT_FIELD_NUMBER = 1; private int infoPort_; /** * <code>optional int32 infoPort = 1;</code> */ 
public boolean hasInfoPort() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional int32 infoPort = 1;</code> */ public int getInfoPort() { return infoPort_; } // optional .hbase.pb.VersionInfo version_info = 2; public static final int VERSION_INFO_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo versionInfo_; /** * <code>optional .hbase.pb.VersionInfo version_info = 2;</code> */ public boolean hasVersionInfo() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional .hbase.pb.VersionInfo version_info = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo getVersionInfo() { return versionInfo_; } /** * <code>optional .hbase.pb.VersionInfo version_info = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfoOrBuilder getVersionInfoOrBuilder() { return versionInfo_; } private void initFields() { infoPort_ = 0; versionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (hasVersionInfo()) { if (!getVersionInfo().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeInt32(1, infoPort_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeMessage(2, versionInfo_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeInt32Size(1, infoPort_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, versionInfo_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo) obj; boolean result = true; result = result && (hasInfoPort() == other.hasInfoPort()); if (hasInfoPort()) { result = result && (getInfoPort() == other.getInfoPort()); } result = result && (hasVersionInfo() == other.hasVersionInfo()); if (hasVersionInfo()) { result = result && getVersionInfo() .equals(other.getVersionInfo()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasInfoPort()) { hash = (37 * hash) + INFOPORT_FIELD_NUMBER; hash = (53 * hash) + getInfoPort(); } if (hasVersionInfo()) { 
hash = (37 * hash) + VERSION_INFO_FIELD_NUMBER; hash = (53 * hash) + getVersionInfo().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.RegionServerInfo} * * <pre> ** * Description of the region server info * </pre> */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfoOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionServerInfo_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionServerInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getVersionInfoFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); infoPort_ = 0; bitField0_ = (bitField0_ & ~0x00000001); if (versionInfoBuilder_ == null) { versionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance(); } else { versionInfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_RegionServerInfo_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo build() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo buildPartial() { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.infoPort_ = infoPort_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } if (versionInfoBuilder_ == null) { result.versionInfo_ = versionInfo_; } else { result.versionInfo_ = versionInfoBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo other) { if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo.getDefaultInstance()) return this; if (other.hasInfoPort()) { setInfoPort(other.getInfoPort()); } if (other.hasVersionInfo()) { mergeVersionInfo(other.getVersionInfo()); } 
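        /*
         * Merge semantics sketch (values hypothetical): scalar fields such as infoPort
         * are overwritten outright when present on the other message, while the singular
         * message field version_info is merged recursively through mergeVersionInfo(),
         * so sub-fields already set on this builder survive unless the other message
         * also sets them. buildPartial() is used below only so the partially populated
         * VersionInfo instances can be created without all of their required fields:
         *
         *   RegionServerInfo.Builder b = RegionServerInfo.newBuilder()
         *       .setVersionInfo(VersionInfo.newBuilder().setVersion("1.2.0").buildPartial());
         *   b.mergeFrom(RegionServerInfo.newBuilder()
         *       .setVersionInfo(VersionInfo.newBuilder().setUrl("git://example.org/hbase").buildPartial())
         *       .buildPartial());
         *   // b.getVersionInfo() now carries both version and url.
         */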
this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (hasVersionInfo()) { if (!getVersionInfo().isInitialized()) { return false; } } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // optional int32 infoPort = 1; private int infoPort_ ; /** * <code>optional int32 infoPort = 1;</code> */ public boolean hasInfoPort() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional int32 infoPort = 1;</code> */ public int getInfoPort() { return infoPort_; } /** * <code>optional int32 infoPort = 1;</code> */ public Builder setInfoPort(int value) { bitField0_ |= 0x00000001; infoPort_ = value; onChanged(); return this; } /** * <code>optional int32 infoPort = 1;</code> */ public Builder clearInfoPort() { bitField0_ = (bitField0_ & ~0x00000001); infoPort_ = 0; onChanged(); return this; } // optional .hbase.pb.VersionInfo version_info = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo versionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfoOrBuilder> versionInfoBuilder_; /** * <code>optional .hbase.pb.VersionInfo version_info = 2;</code> */ public boolean hasVersionInfo() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional .hbase.pb.VersionInfo version_info = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo getVersionInfo() { if (versionInfoBuilder_ == null) { return versionInfo_; } else { return versionInfoBuilder_.getMessage(); } } /** * <code>optional .hbase.pb.VersionInfo version_info = 2;</code> */ public Builder setVersionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo value) { if (versionInfoBuilder_ == null) { if (value == null) { throw new NullPointerException(); } versionInfo_ = value; onChanged(); } else { versionInfoBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .hbase.pb.VersionInfo version_info = 2;</code> */ public Builder setVersionInfo( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo.Builder builderForValue) { if (versionInfoBuilder_ == null) { versionInfo_ = builderForValue.build(); onChanged(); } else { versionInfoBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .hbase.pb.VersionInfo version_info = 2;</code> */ public Builder mergeVersionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo value) { if (versionInfoBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && versionInfo_ != 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance()) { versionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo.newBuilder(versionInfo_).mergeFrom(value).buildPartial(); } else { versionInfo_ = value; } onChanged(); } else { versionInfoBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .hbase.pb.VersionInfo version_info = 2;</code> */ public Builder clearVersionInfo() { if (versionInfoBuilder_ == null) { versionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance(); onChanged(); } else { versionInfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } /** * <code>optional .hbase.pb.VersionInfo version_info = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo.Builder getVersionInfoBuilder() { bitField0_ |= 0x00000002; onChanged(); return getVersionInfoFieldBuilder().getBuilder(); } /** * <code>optional .hbase.pb.VersionInfo version_info = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfoOrBuilder getVersionInfoOrBuilder() { if (versionInfoBuilder_ != null) { return versionInfoBuilder_.getMessageOrBuilder(); } else { return versionInfo_; } } /** * <code>optional .hbase.pb.VersionInfo version_info = 2;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfoOrBuilder> getVersionInfoFieldBuilder() { if (versionInfoBuilder_ == null) { versionInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfoOrBuilder>( versionInfo_, getParentForChildren(), isClean()); versionInfo_ = null; } return versionInfoBuilder_; } // @@protoc_insertion_point(builder_scope:hbase.pb.RegionServerInfo) } static { defaultInstance = new RegionServerInfo(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.RegionServerInfo) } private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_TableName_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_TableName_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_TableSchema_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_TableSchema_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_TableState_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_TableState_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ColumnFamilySchema_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_ColumnFamilySchema_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RegionInfo_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_RegionInfo_fieldAccessorTable; private static 
com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_FavoredNodes_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_FavoredNodes_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RegionSpecifier_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_RegionSpecifier_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_TimeRange_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_TimeRange_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ColumnFamilyTimeRange_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_ColumnFamilyTimeRange_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ServerName_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_ServerName_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_Coprocessor_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_Coprocessor_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_NameStringPair_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_NameStringPair_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_NameBytesPair_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_NameBytesPair_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_BytesBytesPair_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_BytesBytesPair_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_NameInt64Pair_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_NameInt64Pair_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_SnapshotDescription_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_SnapshotDescription_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ProcedureDescription_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_ProcedureDescription_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_EmptyMsg_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_EmptyMsg_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_LongMsg_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_LongMsg_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_DoubleMsg_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable 
internal_static_hbase_pb_DoubleMsg_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_BigDecimalMsg_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_BigDecimalMsg_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_UUID_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_UUID_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_NamespaceDescriptor_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_NamespaceDescriptor_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_VersionInfo_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_VersionInfo_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RegionServerInfo_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_RegionServerInfo_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n\013HBase.proto\022\010hbase.pb\"1\n\tTableName\022\021\n\t" + "namespace\030\001 \002(\014\022\021\n\tqualifier\030\002 \002(\014\"\314\001\n\013T" + "ableSchema\022\'\n\ntable_name\030\001 \001(\0132\023.hbase.p" + "b.TableName\022,\n\nattributes\030\002 \003(\0132\030.hbase." + "pb.BytesBytesPair\0225\n\017column_families\030\003 \003" + "(\0132\034.hbase.pb.ColumnFamilySchema\022/\n\rconf" + "iguration\030\004 \003(\0132\030.hbase.pb.NameStringPai" + "r\"x\n\nTableState\022)\n\005state\030\001 \002(\0162\032.hbase.p" + "b.TableState.State\"?\n\005State\022\013\n\007ENABLED\020\000" + "\022\014\n\010DISABLED\020\001\022\r\n\tDISABLING\020\002\022\014\n\010ENABLIN", "G\020\003\"\201\001\n\022ColumnFamilySchema\022\014\n\004name\030\001 \002(\014" + "\022,\n\nattributes\030\002 \003(\0132\030.hbase.pb.BytesByt" + "esPair\022/\n\rconfiguration\030\003 \003(\0132\030.hbase.pb" + ".NameStringPair\"\243\001\n\nRegionInfo\022\021\n\tregion" + "_id\030\001 \002(\004\022\'\n\ntable_name\030\002 \002(\0132\023.hbase.pb" + ".TableName\022\021\n\tstart_key\030\003 \001(\014\022\017\n\007end_key" + "\030\004 \001(\014\022\017\n\007offline\030\005 \001(\010\022\r\n\005split\030\006 \001(\010\022\025" + "\n\nreplica_id\030\007 \001(\005:\0010\":\n\014FavoredNodes\022*\n" + "\014favored_node\030\001 \003(\0132\024.hbase.pb.ServerNam" + "e\"\236\001\n\017RegionSpecifier\022;\n\004type\030\001 \002(\0162-.hb", "ase.pb.RegionSpecifier.RegionSpecifierTy" + "pe\022\r\n\005value\030\002 \002(\014\"?\n\023RegionSpecifierType" + "\022\017\n\013REGION_NAME\020\001\022\027\n\023ENCODED_REGION_NAME" + "\020\002\"%\n\tTimeRange\022\014\n\004from\030\001 \001(\004\022\n\n\002to\030\002 \001(" + "\004\"W\n\025ColumnFamilyTimeRange\022\025\n\rcolumn_fam" + "ily\030\001 \002(\014\022\'\n\ntime_range\030\002 \002(\0132\023.hbase.pb" + ".TimeRange\"A\n\nServerName\022\021\n\thost_name\030\001 " + "\002(\t\022\014\n\004port\030\002 \001(\r\022\022\n\nstart_code\030\003 \001(\004\"\033\n" + "\013Coprocessor\022\014\n\004name\030\001 \002(\t\"-\n\016NameString" + 
"Pair\022\014\n\004name\030\001 \002(\t\022\r\n\005value\030\002 \002(\t\",\n\rNam", "eBytesPair\022\014\n\004name\030\001 \002(\t\022\r\n\005value\030\002 \001(\014\"" + "/\n\016BytesBytesPair\022\r\n\005first\030\001 \002(\014\022\016\n\006seco" + "nd\030\002 \002(\014\",\n\rNameInt64Pair\022\014\n\004name\030\001 \001(\t\022" + "\r\n\005value\030\002 \001(\003\"\325\001\n\023SnapshotDescription\022\014" + "\n\004name\030\001 \002(\t\022\r\n\005table\030\002 \001(\t\022\030\n\rcreation_" + "time\030\003 \001(\003:\0010\0227\n\004type\030\004 \001(\0162\".hbase.pb.S" + "napshotDescription.Type:\005FLUSH\022\017\n\007versio" + "n\030\005 \001(\005\022\r\n\005owner\030\006 \001(\t\".\n\004Type\022\014\n\010DISABL" + "ED\020\000\022\t\n\005FLUSH\020\001\022\r\n\tSKIPFLUSH\020\002\"\206\001\n\024Proce" + "dureDescription\022\021\n\tsignature\030\001 \002(\t\022\020\n\010in", "stance\030\002 \001(\t\022\030\n\rcreation_time\030\003 \001(\003:\0010\022/" + "\n\rconfiguration\030\004 \003(\0132\030.hbase.pb.NameStr" + "ingPair\"\n\n\010EmptyMsg\"\033\n\007LongMsg\022\020\n\010long_m" + "sg\030\001 \002(\003\"\037\n\tDoubleMsg\022\022\n\ndouble_msg\030\001 \002(" + "\001\"\'\n\rBigDecimalMsg\022\026\n\016bigdecimal_msg\030\001 \002" + "(\014\"5\n\004UUID\022\026\n\016least_sig_bits\030\001 \002(\004\022\025\n\rmo" + "st_sig_bits\030\002 \002(\004\"T\n\023NamespaceDescriptor" + "\022\014\n\004name\030\001 \002(\014\022/\n\rconfiguration\030\002 \003(\0132\030." + "hbase.pb.NameStringPair\"\235\001\n\013VersionInfo\022" + "\017\n\007version\030\001 \002(\t\022\013\n\003url\030\002 \002(\t\022\020\n\010revisio", "n\030\003 \002(\t\022\014\n\004user\030\004 \002(\t\022\014\n\004date\030\005 \002(\t\022\024\n\014s" + "rc_checksum\030\006 \002(\t\022\025\n\rversion_major\030\007 \001(\r" + "\022\025\n\rversion_minor\030\010 \001(\r\"Q\n\020RegionServerI" + "nfo\022\020\n\010infoPort\030\001 \001(\005\022+\n\014version_info\030\002 " + "\001(\0132\025.hbase.pb.VersionInfo*r\n\013CompareTyp" + "e\022\010\n\004LESS\020\000\022\021\n\rLESS_OR_EQUAL\020\001\022\t\n\005EQUAL\020" + "\002\022\r\n\tNOT_EQUAL\020\003\022\024\n\020GREATER_OR_EQUAL\020\004\022\013" + "\n\007GREATER\020\005\022\t\n\005NO_OP\020\006*n\n\010TimeUnit\022\017\n\013NA" + "NOSECONDS\020\001\022\020\n\014MICROSECONDS\020\002\022\020\n\014MILLISE" + "CONDS\020\003\022\013\n\007SECONDS\020\004\022\013\n\007MINUTES\020\005\022\t\n\005HOU", "RS\020\006\022\010\n\004DAYS\020\007B>\n*org.apache.hadoop.hbas" + "e.protobuf.generatedB\013HBaseProtosH\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { public com.google.protobuf.ExtensionRegistry assignDescriptors( com.google.protobuf.Descriptors.FileDescriptor root) { descriptor = root; internal_static_hbase_pb_TableName_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_hbase_pb_TableName_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_TableName_descriptor, new java.lang.String[] { "Namespace", "Qualifier", }); internal_static_hbase_pb_TableSchema_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_hbase_pb_TableSchema_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_TableSchema_descriptor, new java.lang.String[] { "TableName", "Attributes", "ColumnFamilies", "Configuration", }); 
          internal_static_hbase_pb_TableState_descriptor =
            getDescriptor().getMessageTypes().get(2);
          internal_static_hbase_pb_TableState_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_TableState_descriptor,
              new java.lang.String[] { "State", });
          internal_static_hbase_pb_ColumnFamilySchema_descriptor =
            getDescriptor().getMessageTypes().get(3);
          internal_static_hbase_pb_ColumnFamilySchema_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_ColumnFamilySchema_descriptor,
              new java.lang.String[] { "Name", "Attributes", "Configuration", });
          internal_static_hbase_pb_RegionInfo_descriptor =
            getDescriptor().getMessageTypes().get(4);
          internal_static_hbase_pb_RegionInfo_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_RegionInfo_descriptor,
              new java.lang.String[] { "RegionId", "TableName", "StartKey", "EndKey", "Offline", "Split", "ReplicaId", });
          internal_static_hbase_pb_FavoredNodes_descriptor =
            getDescriptor().getMessageTypes().get(5);
          internal_static_hbase_pb_FavoredNodes_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_FavoredNodes_descriptor,
              new java.lang.String[] { "FavoredNode", });
          internal_static_hbase_pb_RegionSpecifier_descriptor =
            getDescriptor().getMessageTypes().get(6);
          internal_static_hbase_pb_RegionSpecifier_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_RegionSpecifier_descriptor,
              new java.lang.String[] { "Type", "Value", });
          internal_static_hbase_pb_TimeRange_descriptor =
            getDescriptor().getMessageTypes().get(7);
          internal_static_hbase_pb_TimeRange_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_TimeRange_descriptor,
              new java.lang.String[] { "From", "To", });
          internal_static_hbase_pb_ColumnFamilyTimeRange_descriptor =
            getDescriptor().getMessageTypes().get(8);
          internal_static_hbase_pb_ColumnFamilyTimeRange_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_ColumnFamilyTimeRange_descriptor,
              new java.lang.String[] { "ColumnFamily", "TimeRange", });
          internal_static_hbase_pb_ServerName_descriptor =
            getDescriptor().getMessageTypes().get(9);
          internal_static_hbase_pb_ServerName_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_ServerName_descriptor,
              new java.lang.String[] { "HostName", "Port", "StartCode", });
          internal_static_hbase_pb_Coprocessor_descriptor =
            getDescriptor().getMessageTypes().get(10);
          internal_static_hbase_pb_Coprocessor_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_Coprocessor_descriptor,
              new java.lang.String[] { "Name", });
          internal_static_hbase_pb_NameStringPair_descriptor =
            getDescriptor().getMessageTypes().get(11);
          internal_static_hbase_pb_NameStringPair_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_NameStringPair_descriptor,
              new java.lang.String[] { "Name", "Value", });
          internal_static_hbase_pb_NameBytesPair_descriptor =
            getDescriptor().getMessageTypes().get(12);
          internal_static_hbase_pb_NameBytesPair_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_NameBytesPair_descriptor,
              new java.lang.String[] { "Name", "Value", });
          internal_static_hbase_pb_BytesBytesPair_descriptor =
            getDescriptor().getMessageTypes().get(13);
          internal_static_hbase_pb_BytesBytesPair_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_BytesBytesPair_descriptor,
              new java.lang.String[] { "First", "Second", });
          internal_static_hbase_pb_NameInt64Pair_descriptor =
            getDescriptor().getMessageTypes().get(14);
          internal_static_hbase_pb_NameInt64Pair_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_NameInt64Pair_descriptor,
              new java.lang.String[] { "Name", "Value", });
          internal_static_hbase_pb_SnapshotDescription_descriptor =
            getDescriptor().getMessageTypes().get(15);
          internal_static_hbase_pb_SnapshotDescription_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_SnapshotDescription_descriptor,
              new java.lang.String[] { "Name", "Table", "CreationTime", "Type", "Version", "Owner", });
          internal_static_hbase_pb_ProcedureDescription_descriptor =
            getDescriptor().getMessageTypes().get(16);
          internal_static_hbase_pb_ProcedureDescription_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_ProcedureDescription_descriptor,
              new java.lang.String[] { "Signature", "Instance", "CreationTime", "Configuration", });
          internal_static_hbase_pb_EmptyMsg_descriptor =
            getDescriptor().getMessageTypes().get(17);
          internal_static_hbase_pb_EmptyMsg_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_EmptyMsg_descriptor,
              new java.lang.String[] { });
          internal_static_hbase_pb_LongMsg_descriptor =
            getDescriptor().getMessageTypes().get(18);
          internal_static_hbase_pb_LongMsg_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_LongMsg_descriptor,
              new java.lang.String[] { "LongMsg", });
          internal_static_hbase_pb_DoubleMsg_descriptor =
            getDescriptor().getMessageTypes().get(19);
          internal_static_hbase_pb_DoubleMsg_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_DoubleMsg_descriptor,
              new java.lang.String[] { "DoubleMsg", });
          internal_static_hbase_pb_BigDecimalMsg_descriptor =
            getDescriptor().getMessageTypes().get(20);
          internal_static_hbase_pb_BigDecimalMsg_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_BigDecimalMsg_descriptor,
              new java.lang.String[] { "BigdecimalMsg", });
          internal_static_hbase_pb_UUID_descriptor =
            getDescriptor().getMessageTypes().get(21);
          internal_static_hbase_pb_UUID_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_UUID_descriptor,
              new java.lang.String[] { "LeastSigBits", "MostSigBits", });
          internal_static_hbase_pb_NamespaceDescriptor_descriptor =
            getDescriptor().getMessageTypes().get(22);
          internal_static_hbase_pb_NamespaceDescriptor_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_NamespaceDescriptor_descriptor,
              new java.lang.String[] { "Name", "Configuration", });
          internal_static_hbase_pb_VersionInfo_descriptor =
            getDescriptor().getMessageTypes().get(23);
          internal_static_hbase_pb_VersionInfo_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_VersionInfo_descriptor,
              new java.lang.String[] { "Version", "Url", "Revision", "User", "Date", "SrcChecksum", "VersionMajor", "VersionMinor", });
          internal_static_hbase_pb_RegionServerInfo_descriptor =
            getDescriptor().getMessageTypes().get(24);
          internal_static_hbase_pb_RegionServerInfo_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_RegionServerInfo_descriptor,
              new java.lang.String[] { "InfoPort", "VersionInfo", });
          return null;
        }
      };
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
        }, assigner);
  }

  // @@protoc_insertion_point(outer_class_scope)
}