// Generated by the protocol buffer compiler. DO NOT EDIT! // source: Master.proto package org.apache.hadoop.hbase.protobuf.generated; public final class MasterProtos { private MasterProtos() {} public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } public interface AddColumnRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .TableName table_name = 1; /** * <code>required .TableName table_name = 1;</code> */ boolean hasTableName(); /** * <code>required .TableName table_name = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName(); /** * <code>required .TableName table_name = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); // required .ColumnFamilySchema column_families = 2; /** * <code>required .ColumnFamilySchema column_families = 2;</code> */ boolean hasColumnFamilies(); /** * <code>required .ColumnFamilySchema column_families = 2;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies(); /** * <code>required .ColumnFamilySchema column_families = 2;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder(); } /** * Protobuf type {@code AddColumnRequest} */ public static final class AddColumnRequest extends com.google.protobuf.GeneratedMessage implements AddColumnRequestOrBuilder { // Use AddColumnRequest.newBuilder() to construct. private AddColumnRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private AddColumnRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final AddColumnRequest defaultInstance; public static AddColumnRequest getDefaultInstance() { return defaultInstance; } public AddColumnRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private AddColumnRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = tableName_.toBuilder(); } tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(tableName_); tableName_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 18: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder subBuilder = null; if (((bitField0_ & 0x00000002) == 0x00000002)) { subBuilder = columnFamilies_.toBuilder(); } columnFamilies_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.PARSER, 
extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(columnFamilies_); columnFamilies_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddColumnRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddColumnRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest.Builder.class); } public static com.google.protobuf.Parser<AddColumnRequest> PARSER = new com.google.protobuf.AbstractParser<AddColumnRequest>() { public AddColumnRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new AddColumnRequest(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<AddColumnRequest> getParserForType() { return PARSER; } private int bitField0_; // required .TableName table_name = 1; public static final int TABLE_NAME_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_; /** * <code>required .TableName table_name = 1;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() { return tableName_; } /** * <code>required .TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { return tableName_; } // required .ColumnFamilySchema column_families = 2; public static final int COLUMN_FAMILIES_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema columnFamilies_; /** * <code>required .ColumnFamilySchema column_families = 2;</code> */ public boolean hasColumnFamilies() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required .ColumnFamilySchema column_families = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies() { return columnFamilies_; } /** * <code>required .ColumnFamilySchema column_families = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder() { return columnFamilies_; } private void initFields() { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasTableName()) { 
memoizedIsInitialized = 0; return false; } if (!hasColumnFamilies()) { memoizedIsInitialized = 0; return false; } if (!getTableName().isInitialized()) { memoizedIsInitialized = 0; return false; } if (!getColumnFamilies().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, tableName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeMessage(2, columnFamilies_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, tableName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, columnFamilies_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest) obj; boolean result = true; result = result && (hasTableName() == other.hasTableName()); if (hasTableName()) { result = result && getTableName() .equals(other.getTableName()); } result = result && (hasColumnFamilies() == other.hasColumnFamilies()); if (hasColumnFamilies()) { result = result && getColumnFamilies() .equals(other.getColumnFamilies()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasTableName()) { hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER; hash = (53 * hash) + getTableName().hashCode(); } if (hasColumnFamilies()) { hash = (37 * hash) + COLUMN_FAMILIES_FIELD_NUMBER; hash = (53 * hash) + getColumnFamilies().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code AddColumnRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddColumnRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddColumnRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getTableNameFieldBuilder(); getColumnFamiliesFieldBuilder(); } } private 
static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (tableNameBuilder_ == null) { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); } else { tableNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (columnFamiliesBuilder_ == null) { columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); } else { columnFamiliesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddColumnRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (tableNameBuilder_ == null) { result.tableName_ = tableName_; } else { result.tableName_ = tableNameBuilder_.build(); } if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } if (columnFamiliesBuilder_ == null) { result.columnFamilies_ = columnFamilies_; } else { result.columnFamilies_ = columnFamiliesBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest.getDefaultInstance()) return this; if (other.hasTableName()) { mergeTableName(other.getTableName()); } if (other.hasColumnFamilies()) { mergeColumnFamilies(other.getColumnFamilies()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasTableName()) { return false; } if (!hasColumnFamilies()) { return false; } if (!getTableName().isInitialized()) { return false; } if (!getColumnFamilies().isInitialized()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = 
(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required .TableName table_name = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * <code>required .TableName table_name = 1;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() { if (tableNameBuilder_ == null) { return tableName_; } else { return tableNameBuilder_.getMessage(); } } /** * <code>required .TableName table_name = 1;</code> */ public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (value == null) { throw new NullPointerException(); } tableName_ = value; onChanged(); } else { tableNameBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .TableName table_name = 1;</code> */ public Builder setTableName( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) { if (tableNameBuilder_ == null) { tableName_ = builderForValue.build(); onChanged(); } else { tableNameBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>required .TableName table_name = 1;</code> */ public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); } else { tableName_ = value; } onChanged(); } else { tableNameBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .TableName table_name = 1;</code> */ public Builder clearTableName() { if (tableNameBuilder_ == null) { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); onChanged(); } else { tableNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>required .TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() { bitField0_ |= 0x00000001; onChanged(); return getTableNameFieldBuilder().getBuilder(); } /** * <code>required .TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { if (tableNameBuilder_ != null) { return tableNameBuilder_.getMessageOrBuilder(); } else { return tableName_; } } /** * <code>required .TableName table_name = 1;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>( tableName_, getParentForChildren(), isClean()); tableName_ = null; } return tableNameBuilder_; } // required .ColumnFamilySchema column_families = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> columnFamiliesBuilder_; /** * <code>required .ColumnFamilySchema column_families = 2;</code> */ public boolean hasColumnFamilies() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required .ColumnFamilySchema column_families = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies() { if (columnFamiliesBuilder_ == null) { return columnFamilies_; } else { return columnFamiliesBuilder_.getMessage(); } } /** * <code>required .ColumnFamilySchema column_families = 2;</code> */ public Builder setColumnFamilies(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { if (columnFamiliesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } columnFamilies_ = value; onChanged(); } else { columnFamiliesBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } /** * <code>required .ColumnFamilySchema column_families = 2;</code> */ public Builder setColumnFamilies( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder builderForValue) { if (columnFamiliesBuilder_ == null) { columnFamilies_ = builderForValue.build(); onChanged(); } else { columnFamiliesBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } /** * <code>required .ColumnFamilySchema column_families = 2;</code> */ public Builder mergeColumnFamilies(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { if (columnFamiliesBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && columnFamilies_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance()) { columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.newBuilder(columnFamilies_).mergeFrom(value).buildPartial(); } else { columnFamilies_ = value; } onChanged(); } else { columnFamiliesBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } /** * <code>required .ColumnFamilySchema column_families = 2;</code> */ public Builder clearColumnFamilies() { if (columnFamiliesBuilder_ == null) { columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); onChanged(); } else { columnFamiliesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } /** * <code>required .ColumnFamilySchema column_families 
= 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder getColumnFamiliesBuilder() { bitField0_ |= 0x00000002; onChanged(); return getColumnFamiliesFieldBuilder().getBuilder(); } /** * <code>required .ColumnFamilySchema column_families = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder() { if (columnFamiliesBuilder_ != null) { return columnFamiliesBuilder_.getMessageOrBuilder(); } else { return columnFamilies_; } } /** * <code>required .ColumnFamilySchema column_families = 2;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> getColumnFamiliesFieldBuilder() { if (columnFamiliesBuilder_ == null) { columnFamiliesBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder>( columnFamilies_, getParentForChildren(), isClean()); columnFamilies_ = null; } return columnFamiliesBuilder_; } // @@protoc_insertion_point(builder_scope:AddColumnRequest) } static { defaultInstance = new AddColumnRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:AddColumnRequest) } public interface AddColumnResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code AddColumnResponse} */ public static final class AddColumnResponse extends com.google.protobuf.GeneratedMessage implements AddColumnResponseOrBuilder { // Use AddColumnResponse.newBuilder() to construct. 
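  // A minimal usage sketch of the newBuilder() construction pattern these
  // messages share, shown for AddColumnRequest above. It assumes the
  // TableName and ColumnFamilySchema builders from HBaseProtos expose
  // bytes fields as below; the namespace, table, and family names are
  // hypothetical placeholders. Kept as a comment (not generated code) so
  // the file compiles unchanged.
  //
  //   MasterProtos.AddColumnRequest request =
  //       MasterProtos.AddColumnRequest.newBuilder()
  //           .setTableName(HBaseProtos.TableName.newBuilder()
  //               .setNamespace(com.google.protobuf.ByteString.copyFromUtf8("default"))    // hypothetical
  //               .setQualifier(com.google.protobuf.ByteString.copyFromUtf8("my_table")))  // hypothetical
  //           .setColumnFamilies(HBaseProtos.ColumnFamilySchema.newBuilder()
  //               .setName(com.google.protobuf.ByteString.copyFromUtf8("cf1")))            // hypothetical
  //           .build(); // build() throws if either required field is unset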
private AddColumnResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private AddColumnResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final AddColumnResponse defaultInstance; public static AddColumnResponse getDefaultInstance() { return defaultInstance; } public AddColumnResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private AddColumnResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddColumnResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddColumnResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse.Builder.class); } public static com.google.protobuf.Parser<AddColumnResponse> PARSER = new com.google.protobuf.AbstractParser<AddColumnResponse>() { public AddColumnResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new AddColumnResponse(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<AddColumnResponse> getParserForType() { return PARSER; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final 
java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return 
newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code AddColumnResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddColumnResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddColumnResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddColumnResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse parsedMessage = null; try { 
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } // @@protoc_insertion_point(builder_scope:AddColumnResponse) } static { defaultInstance = new AddColumnResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:AddColumnResponse) } public interface DeleteColumnRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .TableName table_name = 1; /** * <code>required .TableName table_name = 1;</code> */ boolean hasTableName(); /** * <code>required .TableName table_name = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName(); /** * <code>required .TableName table_name = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); // required bytes column_name = 2; /** * <code>required bytes column_name = 2;</code> */ boolean hasColumnName(); /** * <code>required bytes column_name = 2;</code> */ com.google.protobuf.ByteString getColumnName(); } /** * Protobuf type {@code DeleteColumnRequest} */ public static final class DeleteColumnRequest extends com.google.protobuf.GeneratedMessage implements DeleteColumnRequestOrBuilder { // Use DeleteColumnRequest.newBuilder() to construct. private DeleteColumnRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private DeleteColumnRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final DeleteColumnRequest defaultInstance; public static DeleteColumnRequest getDefaultInstance() { return defaultInstance; } public DeleteColumnRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private DeleteColumnRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = tableName_.toBuilder(); } tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(tableName_); tableName_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 18: { bitField0_ |= 0x00000002; columnName_ = input.readBytes(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); 
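      // Note on the parsing constructors above and below: the switch is over
      // the raw protobuf wire tag, tag = (field_number << 3) | wire_type.
      // Tag 0 signals end of input; tag 10 is field 1 as a length-delimited
      // message (TableName); tag 18 is field 2 length-delimited
      // (ColumnFamilySchema in AddColumnRequest, raw bytes here). Tags the
      // parser does not recognize are retained via parseUnknownField, so
      // fields added by newer schemas survive a round trip through this code.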
} finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteColumnRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteColumnRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest.Builder.class); } public static com.google.protobuf.Parser<DeleteColumnRequest> PARSER = new com.google.protobuf.AbstractParser<DeleteColumnRequest>() { public DeleteColumnRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new DeleteColumnRequest(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<DeleteColumnRequest> getParserForType() { return PARSER; } private int bitField0_; // required .TableName table_name = 1; public static final int TABLE_NAME_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_; /** * <code>required .TableName table_name = 1;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() { return tableName_; } /** * <code>required .TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { return tableName_; } // required bytes column_name = 2; public static final int COLUMN_NAME_FIELD_NUMBER = 2; private com.google.protobuf.ByteString columnName_; /** * <code>required bytes column_name = 2;</code> */ public boolean hasColumnName() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required bytes column_name = 2;</code> */ public com.google.protobuf.ByteString getColumnName() { return columnName_; } private void initFields() { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); columnName_ = com.google.protobuf.ByteString.EMPTY; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasTableName()) { memoizedIsInitialized = 0; return false; } if (!hasColumnName()) { memoizedIsInitialized = 0; return false; } if (!getTableName().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, tableName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, columnName_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, 
tableName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, columnName_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest) obj; boolean result = true; result = result && (hasTableName() == other.hasTableName()); if (hasTableName()) { result = result && getTableName() .equals(other.getTableName()); } result = result && (hasColumnName() == other.hasColumnName()); if (hasColumnName()) { result = result && getColumnName() .equals(other.getColumnName()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasTableName()) { hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER; hash = (53 * hash) + getTableName().hashCode(); } if (hasColumnName()) { hash = (37 * hash) + COLUMN_NAME_FIELD_NUMBER; hash = (53 * hash) + getColumnName().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code DeleteColumnRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteColumnRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteColumnRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getTableNameFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (tableNameBuilder_ == null) { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); } else { tableNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); columnName_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteColumnRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest.getDefaultInstance(); } public 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (tableNameBuilder_ == null) { result.tableName_ = tableName_; } else { result.tableName_ = tableNameBuilder_.build(); } if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.columnName_ = columnName_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest.getDefaultInstance()) return this; if (other.hasTableName()) { mergeTableName(other.getTableName()); } if (other.hasColumnName()) { setColumnName(other.getColumnName()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasTableName()) { return false; } if (!hasColumnName()) { return false; } if (!getTableName().isInitialized()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required .TableName table_name = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * <code>required .TableName table_name = 1;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() { if (tableNameBuilder_ == null) { return tableName_; } else { return tableNameBuilder_.getMessage(); } } /** * <code>required .TableName table_name = 1;</code> */ public Builder 
setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (value == null) { throw new NullPointerException(); } tableName_ = value; onChanged(); } else { tableNameBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .TableName table_name = 1;</code> */ public Builder setTableName( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) { if (tableNameBuilder_ == null) { tableName_ = builderForValue.build(); onChanged(); } else { tableNameBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>required .TableName table_name = 1;</code> */ public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); } else { tableName_ = value; } onChanged(); } else { tableNameBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .TableName table_name = 1;</code> */ public Builder clearTableName() { if (tableNameBuilder_ == null) { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); onChanged(); } else { tableNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>required .TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() { bitField0_ |= 0x00000001; onChanged(); return getTableNameFieldBuilder().getBuilder(); } /** * <code>required .TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { if (tableNameBuilder_ != null) { return tableNameBuilder_.getMessageOrBuilder(); } else { return tableName_; } } /** * <code>required .TableName table_name = 1;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>( tableName_, getParentForChildren(), isClean()); tableName_ = null; } return tableNameBuilder_; } // required bytes column_name = 2; private com.google.protobuf.ByteString columnName_ = com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes column_name = 2;</code> */ public boolean hasColumnName() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required bytes column_name = 2;</code> */ public com.google.protobuf.ByteString getColumnName() { return columnName_; } /** * <code>required bytes column_name = 2;</code> */ public Builder setColumnName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; columnName_ = value; onChanged(); return this; } 
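      // A minimal usage sketch for DeleteColumnRequest (a comment, not
      // generated code): the same builder pattern as AddColumnRequest, except
      // the column is named by a raw bytes field rather than a
      // ColumnFamilySchema message. The family name below is a hypothetical
      // placeholder, and tableName stands for a TableName built elsewhere.
      //
      //   MasterProtos.DeleteColumnRequest del =
      //       MasterProtos.DeleteColumnRequest.newBuilder()
      //           .setTableName(tableName) // an HBaseProtos.TableName message
      //           .setColumnName(com.google.protobuf.ByteString.copyFromUtf8("cf1")) // hypothetical
      //           .build();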
/** * <code>required bytes column_name = 2;</code> */ public Builder clearColumnName() { bitField0_ = (bitField0_ & ~0x00000002); columnName_ = getDefaultInstance().getColumnName(); onChanged(); return this; } // @@protoc_insertion_point(builder_scope:DeleteColumnRequest) } static { defaultInstance = new DeleteColumnRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:DeleteColumnRequest) } public interface DeleteColumnResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code DeleteColumnResponse} */ public static final class DeleteColumnResponse extends com.google.protobuf.GeneratedMessage implements DeleteColumnResponseOrBuilder { // Use DeleteColumnResponse.newBuilder() to construct. private DeleteColumnResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private DeleteColumnResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final DeleteColumnResponse defaultInstance; public static DeleteColumnResponse getDefaultInstance() { return defaultInstance; } public DeleteColumnResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private DeleteColumnResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteColumnResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteColumnResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse.Builder.class); } public static com.google.protobuf.Parser<DeleteColumnResponse> PARSER = new com.google.protobuf.AbstractParser<DeleteColumnResponse>() { public DeleteColumnResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new DeleteColumnResponse(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<DeleteColumnResponse> getParserForType() { return PARSER; } private void initFields() { } private byte memoizedIsInitialized = 
-1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, 
extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code DeleteColumnResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteColumnResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteColumnResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteColumnResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse(this); onBuilt(); return result; } public Builder 
mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } // @@protoc_insertion_point(builder_scope:DeleteColumnResponse) } static { defaultInstance = new DeleteColumnResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:DeleteColumnResponse) } public interface ModifyColumnRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .TableName table_name = 1; /** * <code>required .TableName table_name = 1;</code> */ boolean hasTableName(); /** * <code>required .TableName table_name = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName(); /** * <code>required .TableName table_name = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); // required .ColumnFamilySchema column_families = 2; /** * <code>required .ColumnFamilySchema column_families = 2;</code> */ boolean hasColumnFamilies(); /** * <code>required .ColumnFamilySchema column_families = 2;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies(); /** * <code>required .ColumnFamilySchema column_families = 2;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder(); } /** * Protobuf type {@code ModifyColumnRequest} */ public static final class ModifyColumnRequest extends com.google.protobuf.GeneratedMessage implements ModifyColumnRequestOrBuilder { // Use ModifyColumnRequest.newBuilder() to construct. 
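  // Editorial sketch (not protoc output): both fields of ModifyColumnRequest
  // are required, so build() throws an UninitializedMessageException until
  // table_name and column_families are both set.  ColumnFamilySchema.setName
  // is assumed from HBase.proto; "cf" is a hypothetical family name.
  //
  //   ModifyColumnRequest req = ModifyColumnRequest.newBuilder()
  //       .setTableName(tableName) // an HBaseProtos.TableName built elsewhere
  //       .setColumnFamilies(
  //           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.newBuilder()
  //               .setName(com.google.protobuf.ByteString.copyFromUtf8("cf")))
  //       .build();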
private ModifyColumnRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private ModifyColumnRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final ModifyColumnRequest defaultInstance; public static ModifyColumnRequest getDefaultInstance() { return defaultInstance; } public ModifyColumnRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ModifyColumnRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = tableName_.toBuilder(); } tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(tableName_); tableName_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 18: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder subBuilder = null; if (((bitField0_ & 0x00000002) == 0x00000002)) { subBuilder = columnFamilies_.toBuilder(); } columnFamilies_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(columnFamilies_); columnFamilies_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyColumnRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyColumnRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest.Builder.class); } public static com.google.protobuf.Parser<ModifyColumnRequest> PARSER = new com.google.protobuf.AbstractParser<ModifyColumnRequest>() { public ModifyColumnRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ModifyColumnRequest(input, 
extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<ModifyColumnRequest> getParserForType() { return PARSER; } private int bitField0_; // required .TableName table_name = 1; public static final int TABLE_NAME_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_; /** * <code>required .TableName table_name = 1;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() { return tableName_; } /** * <code>required .TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { return tableName_; } // required .ColumnFamilySchema column_families = 2; public static final int COLUMN_FAMILIES_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema columnFamilies_; /** * <code>required .ColumnFamilySchema column_families = 2;</code> */ public boolean hasColumnFamilies() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required .ColumnFamilySchema column_families = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies() { return columnFamilies_; } /** * <code>required .ColumnFamilySchema column_families = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder() { return columnFamilies_; } private void initFields() { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasTableName()) { memoizedIsInitialized = 0; return false; } if (!hasColumnFamilies()) { memoizedIsInitialized = 0; return false; } if (!getTableName().isInitialized()) { memoizedIsInitialized = 0; return false; } if (!getColumnFamilies().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, tableName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeMessage(2, columnFamilies_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, tableName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, columnFamilies_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest) obj; boolean result = true; result = result && (hasTableName() == other.hasTableName()); if (hasTableName()) { result = result && getTableName() .equals(other.getTableName()); } result = result && (hasColumnFamilies() == other.hasColumnFamilies()); if (hasColumnFamilies()) { result = result && getColumnFamilies() .equals(other.getColumnFamilies()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasTableName()) { hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER; hash = (53 * hash) + getTableName().hashCode(); } if (hasColumnFamilies()) { hash = (37 * hash) + COLUMN_FAMILIES_FIELD_NUMBER; hash = (53 * hash) + getColumnFamilies().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest 
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code ModifyColumnRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyColumnRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyColumnRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getTableNameFieldBuilder(); getColumnFamiliesFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (tableNameBuilder_ == null) { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); } else { tableNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (columnFamiliesBuilder_ == null) { columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); } else { columnFamiliesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyColumnRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest buildPartial() { 
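      // (Editorial comments, not protoc output.)  buildPartial() copies each
      // field whose has-bit is set in bitField0_ into the result message:
      // when a nested SingleFieldBuilder exists it is asked to build() the
      // sub-message, otherwise the locally cached message instance is reused.
      // Unlike build(), no required-field check is performed here.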
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (tableNameBuilder_ == null) { result.tableName_ = tableName_; } else { result.tableName_ = tableNameBuilder_.build(); } if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } if (columnFamiliesBuilder_ == null) { result.columnFamilies_ = columnFamilies_; } else { result.columnFamilies_ = columnFamiliesBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest.getDefaultInstance()) return this; if (other.hasTableName()) { mergeTableName(other.getTableName()); } if (other.hasColumnFamilies()) { mergeColumnFamilies(other.getColumnFamilies()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasTableName()) { return false; } if (!hasColumnFamilies()) { return false; } if (!getTableName().isInitialized()) { return false; } if (!getColumnFamilies().isInitialized()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required .TableName table_name = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * <code>required .TableName table_name = 1;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() { if (tableNameBuilder_ == null) { return tableName_; } else { return tableNameBuilder_.getMessage(); } } /** * <code>required .TableName table_name = 1;</code> */ public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (value == null) { throw new NullPointerException(); } tableName_ = value; onChanged(); 
} else { tableNameBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .TableName table_name = 1;</code> */ public Builder setTableName( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) { if (tableNameBuilder_ == null) { tableName_ = builderForValue.build(); onChanged(); } else { tableNameBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>required .TableName table_name = 1;</code> */ public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); } else { tableName_ = value; } onChanged(); } else { tableNameBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .TableName table_name = 1;</code> */ public Builder clearTableName() { if (tableNameBuilder_ == null) { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); onChanged(); } else { tableNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>required .TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() { bitField0_ |= 0x00000001; onChanged(); return getTableNameFieldBuilder().getBuilder(); } /** * <code>required .TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { if (tableNameBuilder_ != null) { return tableNameBuilder_.getMessageOrBuilder(); } else { return tableName_; } } /** * <code>required .TableName table_name = 1;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>( tableName_, getParentForChildren(), isClean()); tableName_ = null; } return tableNameBuilder_; } // required .ColumnFamilySchema column_families = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> columnFamiliesBuilder_; /** * <code>required .ColumnFamilySchema column_families = 2;</code> */ public boolean hasColumnFamilies() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required .ColumnFamilySchema column_families = 2;</code> */ public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies() { if (columnFamiliesBuilder_ == null) { return columnFamilies_; } else { return columnFamiliesBuilder_.getMessage(); } } /** * <code>required .ColumnFamilySchema column_families = 2;</code> */ public Builder setColumnFamilies(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { if (columnFamiliesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } columnFamilies_ = value; onChanged(); } else { columnFamiliesBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } /** * <code>required .ColumnFamilySchema column_families = 2;</code> */ public Builder setColumnFamilies( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder builderForValue) { if (columnFamiliesBuilder_ == null) { columnFamilies_ = builderForValue.build(); onChanged(); } else { columnFamiliesBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } /** * <code>required .ColumnFamilySchema column_families = 2;</code> */ public Builder mergeColumnFamilies(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { if (columnFamiliesBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && columnFamilies_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance()) { columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.newBuilder(columnFamilies_).mergeFrom(value).buildPartial(); } else { columnFamilies_ = value; } onChanged(); } else { columnFamiliesBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } /** * <code>required .ColumnFamilySchema column_families = 2;</code> */ public Builder clearColumnFamilies() { if (columnFamiliesBuilder_ == null) { columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); onChanged(); } else { columnFamiliesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } /** * <code>required .ColumnFamilySchema column_families = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder getColumnFamiliesBuilder() { bitField0_ |= 0x00000002; onChanged(); return getColumnFamiliesFieldBuilder().getBuilder(); } /** * <code>required .ColumnFamilySchema column_families = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder() { if (columnFamiliesBuilder_ != null) { return columnFamiliesBuilder_.getMessageOrBuilder(); } else { return columnFamilies_; } } /** * <code>required .ColumnFamilySchema column_families = 2;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> getColumnFamiliesFieldBuilder() { if (columnFamiliesBuilder_ == null) { columnFamiliesBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder>( columnFamilies_, getParentForChildren(), isClean()); columnFamilies_ = null; } return 
columnFamiliesBuilder_; } // @@protoc_insertion_point(builder_scope:ModifyColumnRequest) } static { defaultInstance = new ModifyColumnRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:ModifyColumnRequest) } public interface ModifyColumnResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code ModifyColumnResponse} */ public static final class ModifyColumnResponse extends com.google.protobuf.GeneratedMessage implements ModifyColumnResponseOrBuilder { // Use ModifyColumnResponse.newBuilder() to construct. private ModifyColumnResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private ModifyColumnResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final ModifyColumnResponse defaultInstance; public static ModifyColumnResponse getDefaultInstance() { return defaultInstance; } public ModifyColumnResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ModifyColumnResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyColumnResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyColumnResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse.Builder.class); } public static com.google.protobuf.Parser<ModifyColumnResponse> PARSER = new com.google.protobuf.AbstractParser<ModifyColumnResponse>() { public ModifyColumnResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ModifyColumnResponse(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<ModifyColumnResponse> getParserForType() { return PARSER; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } 
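  // Editorial sketch (not protoc output): ModifyColumnResponse declares no
  // fields, so serialization round-trips only the unknown-field set.
  //
  //   byte[] wire = ModifyColumnResponse.getDefaultInstance().toByteArray(); // zero bytes
  //   ModifyColumnResponse back = ModifyColumnResponse.parseFrom(wire);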
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse parseFrom( com.google.protobuf.CodedInputStream input) throws 
java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code ModifyColumnResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyColumnResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyColumnResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyColumnResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse) { return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } // @@protoc_insertion_point(builder_scope:ModifyColumnResponse) } static { defaultInstance = new ModifyColumnResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:ModifyColumnResponse) } public interface MoveRegionRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .RegionSpecifier region = 1; /** * <code>required .RegionSpecifier region = 1;</code> */ boolean hasRegion(); /** * <code>required .RegionSpecifier region = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); /** * <code>required .RegionSpecifier region = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); // optional .ServerName dest_server_name = 2; /** * <code>optional .ServerName dest_server_name = 2;</code> */ boolean hasDestServerName(); /** * <code>optional .ServerName dest_server_name = 2;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDestServerName(); /** * <code>optional .ServerName dest_server_name = 2;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDestServerNameOrBuilder(); } /** * Protobuf type {@code MoveRegionRequest} */ public static final class MoveRegionRequest extends com.google.protobuf.GeneratedMessage implements MoveRegionRequestOrBuilder { // Use MoveRegionRequest.newBuilder() to construct. 
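  // Editorial sketch (not protoc output): region is required while
  // dest_server_name is optional; omitting the destination is how callers ask
  // the master to pick a target server.  The RegionSpecifier sub-fields
  // (type, value) and the ENCODED_REGION_NAME constant are assumed from
  // HBase.proto; "1588230740" is a hypothetical encoded region name.
  //
  //   MoveRegionRequest req = MoveRegionRequest.newBuilder()
  //       .setRegion(
  //           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder()
  //               .setType(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos
  //                   .RegionSpecifier.RegionSpecifierType.ENCODED_REGION_NAME)
  //               .setValue(com.google.protobuf.ByteString.copyFromUtf8("1588230740")))
  //       .build();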
private MoveRegionRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private MoveRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final MoveRegionRequest defaultInstance; public static MoveRegionRequest getDefaultInstance() { return defaultInstance; } public MoveRegionRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private MoveRegionRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = region_.toBuilder(); } region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(region_); region_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 18: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = null; if (((bitField0_ & 0x00000002) == 0x00000002)) { subBuilder = destServerName_.toBuilder(); } destServerName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(destServerName_); destServerName_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_MoveRegionRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_MoveRegionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest.Builder.class); } public static com.google.protobuf.Parser<MoveRegionRequest> PARSER = new com.google.protobuf.AbstractParser<MoveRegionRequest>() { public MoveRegionRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new MoveRegionRequest(input, extensionRegistry); } }; @java.lang.Override public 
com.google.protobuf.Parser<MoveRegionRequest> getParserForType() { return PARSER; } private int bitField0_; // required .RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; /** * <code>required .RegionSpecifier region = 1;</code> */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { return region_; } /** * <code>required .RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { return region_; } // optional .ServerName dest_server_name = 2; public static final int DEST_SERVER_NAME_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName destServerName_; /** * <code>optional .ServerName dest_server_name = 2;</code> */ public boolean hasDestServerName() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional .ServerName dest_server_name = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDestServerName() { return destServerName_; } /** * <code>optional .ServerName dest_server_name = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDestServerNameOrBuilder() { return destServerName_; } private void initFields() { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); destServerName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasRegion()) { memoizedIsInitialized = 0; return false; } if (!getRegion().isInitialized()) { memoizedIsInitialized = 0; return false; } if (hasDestServerName()) { if (!getDestServerName().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, region_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeMessage(2, destServerName_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, region_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, destServerName_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest)) { return super.equals(obj); } 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest) obj; boolean result = true; result = result && (hasRegion() == other.hasRegion()); if (hasRegion()) { result = result && getRegion() .equals(other.getRegion()); } result = result && (hasDestServerName() == other.hasDestServerName()); if (hasDestServerName()) { result = result && getDestServerName() .equals(other.getDestServerName()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRegion()) { hash = (37 * hash) + REGION_FIELD_NUMBER; hash = (53 * hash) + getRegion().hashCode(); } if (hasDestServerName()) { hash = (37 * hash) + DEST_SERVER_NAME_FIELD_NUMBER; hash = (53 * hash) + getDestServerName().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code MoveRegionRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_MoveRegionRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_MoveRegionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getRegionFieldBuilder(); getDestServerNameFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (regionBuilder_ == null) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); } else { regionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (destServerNameBuilder_ == null) { destServerName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); } else { destServerNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_MoveRegionRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 
0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (regionBuilder_ == null) { result.region_ = region_; } else { result.region_ = regionBuilder_.build(); } if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } if (destServerNameBuilder_ == null) { result.destServerName_ = destServerName_; } else { result.destServerName_ = destServerNameBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest.getDefaultInstance()) return this; if (other.hasRegion()) { mergeRegion(other.getRegion()); } if (other.hasDestServerName()) { mergeDestServerName(other.getDestServerName()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasRegion()) { return false; } if (!getRegion().isInitialized()) { return false; } if (hasDestServerName()) { if (!getDestServerName().isInitialized()) { return false; } } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required .RegionSpecifier region = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; /** * <code>required .RegionSpecifier region = 1;</code> */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { return region_; } else { return regionBuilder_.getMessage(); } } /** * <code>required .RegionSpecifier region = 1;</code> */ public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } region_ = value; onChanged(); } else { regionBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .RegionSpecifier region = 1;</code> */ public Builder setRegion( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { if (regionBuilder_ 
== null) { region_ = builderForValue.build(); onChanged(); } else { regionBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>required .RegionSpecifier region = 1;</code> */ public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); } else { region_ = value; } onChanged(); } else { regionBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .RegionSpecifier region = 1;</code> */ public Builder clearRegion() { if (regionBuilder_ == null) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); onChanged(); } else { regionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>required .RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { bitField0_ |= 0x00000001; onChanged(); return getRegionFieldBuilder().getBuilder(); } /** * <code>required .RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); } else { return region_; } } /** * <code>required .RegionSpecifier region = 1;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { if (regionBuilder_ == null) { regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( region_, getParentForChildren(), isClean()); region_ = null; } return regionBuilder_; } // optional .ServerName dest_server_name = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName destServerName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> destServerNameBuilder_; /** * <code>optional .ServerName dest_server_name = 2;</code> */ public boolean hasDestServerName() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional .ServerName dest_server_name = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDestServerName() { if (destServerNameBuilder_ == null) { return destServerName_; } else { return destServerNameBuilder_.getMessage(); } } /** * <code>optional .ServerName dest_server_name = 2;</code> */ public Builder setDestServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { if 
(destServerNameBuilder_ == null) { if (value == null) { throw new NullPointerException(); } destServerName_ = value; onChanged(); } else { destServerNameBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .ServerName dest_server_name = 2;</code> */ public Builder setDestServerName( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { if (destServerNameBuilder_ == null) { destServerName_ = builderForValue.build(); onChanged(); } else { destServerNameBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .ServerName dest_server_name = 2;</code> */ public Builder mergeDestServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { if (destServerNameBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && destServerName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { destServerName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(destServerName_).mergeFrom(value).buildPartial(); } else { destServerName_ = value; } onChanged(); } else { destServerNameBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .ServerName dest_server_name = 2;</code> */ public Builder clearDestServerName() { if (destServerNameBuilder_ == null) { destServerName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); onChanged(); } else { destServerNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } /** * <code>optional .ServerName dest_server_name = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getDestServerNameBuilder() { bitField0_ |= 0x00000002; onChanged(); return getDestServerNameFieldBuilder().getBuilder(); } /** * <code>optional .ServerName dest_server_name = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDestServerNameOrBuilder() { if (destServerNameBuilder_ != null) { return destServerNameBuilder_.getMessageOrBuilder(); } else { return destServerName_; } } /** * <code>optional .ServerName dest_server_name = 2;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getDestServerNameFieldBuilder() { if (destServerNameBuilder_ == null) { destServerNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( destServerName_, getParentForChildren(), isClean()); destServerName_ = null; } return destServerNameBuilder_; } // @@protoc_insertion_point(builder_scope:MoveRegionRequest) } static { defaultInstance = new MoveRegionRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:MoveRegionRequest) } public interface MoveRegionResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code MoveRegionResponse} */ public static final class MoveRegionResponse extends com.google.protobuf.GeneratedMessage implements MoveRegionResponseOrBuilder { // Use MoveRegionResponse.newBuilder() to 
construct. private MoveRegionResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private MoveRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final MoveRegionResponse defaultInstance; public static MoveRegionResponse getDefaultInstance() { return defaultInstance; } public MoveRegionResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private MoveRegionResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_MoveRegionResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_MoveRegionResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse.Builder.class); } public static com.google.protobuf.Parser<MoveRegionResponse> PARSER = new com.google.protobuf.AbstractParser<MoveRegionResponse>() { public MoveRegionResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new MoveRegionResponse(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<MoveRegionResponse> getParserForType() { return PARSER; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public 
boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse prototype) { return newBuilder().mergeFrom(prototype); } 
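// Editor's note (not part of the generated output): a minimal, hedged sketch of the
// round-trip these generated builder/parse entry points support. The variables
// `region` and `target` are hypothetical RegionSpecifier/ServerName instances;
// build() throws an UninitializedMessageException if the required region field is unset.
//
//   MoveRegionRequest request = MoveRegionRequest.newBuilder()
//       .setRegion(region)            // required .RegionSpecifier
//       .setDestServerName(target)    // optional .ServerName
//       .build();
//   byte[] wire = request.toByteArray();
//   MoveRegionRequest parsed = MoveRegionRequest.parseFrom(wire);
//   // MoveRegionResponse carries no fields; a server would typically reply with
//   // MoveRegionResponse.getDefaultInstance().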
public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code MoveRegionResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_MoveRegionResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_MoveRegionResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_MoveRegionResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } // @@protoc_insertion_point(builder_scope:MoveRegionResponse) } static { defaultInstance = new MoveRegionResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:MoveRegionResponse) } public interface DispatchMergingRegionsRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .RegionSpecifier region_a = 1; /** * <code>required .RegionSpecifier region_a = 1;</code> */ boolean hasRegionA(); /** * <code>required .RegionSpecifier region_a = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionA(); /** * <code>required .RegionSpecifier region_a = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionAOrBuilder(); // required .RegionSpecifier region_b = 2; /** * <code>required .RegionSpecifier region_b = 2;</code> */ boolean hasRegionB(); /** * <code>required .RegionSpecifier region_b = 2;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionB(); /** * <code>required .RegionSpecifier region_b = 2;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionBOrBuilder(); // optional bool forcible = 3 [default = false]; /** * <code>optional bool forcible = 3 [default = false];</code> */ boolean hasForcible(); /** * <code>optional bool forcible = 3 [default = false];</code> */ boolean getForcible(); } /** * Protobuf type {@code DispatchMergingRegionsRequest} * * <pre> ** * Dispatch merging the specified regions. * </pre> */ public static final class DispatchMergingRegionsRequest extends com.google.protobuf.GeneratedMessage implements DispatchMergingRegionsRequestOrBuilder { // Use DispatchMergingRegionsRequest.newBuilder() to construct. 
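// Editor's note (not part of the generated output): a hedged usage sketch for
// DispatchMergingRegionsRequest. `specA` and `specB` stand for hypothetical
// RegionSpecifier values; region_a and region_b are both required, so build()
// throws an UninitializedMessageException if either is left unset, while
// forcible is optional and defaults to false.
//
//   DispatchMergingRegionsRequest merge = DispatchMergingRegionsRequest.newBuilder()
//       .setRegionA(specA)
//       .setRegionB(specB)
//       .setForcible(true)   // overrides the [default = false]
//       .build();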
private DispatchMergingRegionsRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private DispatchMergingRegionsRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final DispatchMergingRegionsRequest defaultInstance; public static DispatchMergingRegionsRequest getDefaultInstance() { return defaultInstance; } public DispatchMergingRegionsRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private DispatchMergingRegionsRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = regionA_.toBuilder(); } regionA_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(regionA_); regionA_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 18: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; if (((bitField0_ & 0x00000002) == 0x00000002)) { subBuilder = regionB_.toBuilder(); } regionB_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(regionB_); regionB_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } case 24: { bitField0_ |= 0x00000004; forcible_ = input.readBool(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DispatchMergingRegionsRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DispatchMergingRegionsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest.Builder.class); } public static com.google.protobuf.Parser<DispatchMergingRegionsRequest> PARSER = new com.google.protobuf.AbstractParser<DispatchMergingRegionsRequest>() { public DispatchMergingRegionsRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new DispatchMergingRegionsRequest(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<DispatchMergingRegionsRequest> getParserForType() { return PARSER; } private int bitField0_; // required .RegionSpecifier region_a = 1; public static final int REGION_A_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier regionA_; /** * <code>required .RegionSpecifier region_a = 1;</code> */ public boolean hasRegionA() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .RegionSpecifier region_a = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionA() { return regionA_; } /** * <code>required .RegionSpecifier region_a = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionAOrBuilder() { return regionA_; } // required .RegionSpecifier region_b = 2; public static final int REGION_B_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier regionB_; /** * <code>required .RegionSpecifier region_b = 2;</code> */ public boolean hasRegionB() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required .RegionSpecifier region_b = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionB() { return regionB_; } /** * <code>required .RegionSpecifier region_b = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionBOrBuilder() { return regionB_; } // optional bool forcible = 3 [default = false]; public static final int FORCIBLE_FIELD_NUMBER = 3; private boolean forcible_; /** * <code>optional bool forcible = 3 [default = false];</code> */ public boolean hasForcible() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional bool forcible = 3 [default = false];</code> */ public boolean getForcible() { return forcible_; } private void initFields() { regionA_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); regionB_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); forcible_ = false; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasRegionA()) { memoizedIsInitialized = 0; return false; } if (!hasRegionB()) { memoizedIsInitialized = 0; return false; } if (!getRegionA().isInitialized()) { memoizedIsInitialized = 0; return false; } if (!getRegionB().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, regionA_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeMessage(2, regionB_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBool(3, forcible_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream 
.computeMessageSize(1, regionA_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, regionB_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(3, forcible_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest) obj; boolean result = true; result = result && (hasRegionA() == other.hasRegionA()); if (hasRegionA()) { result = result && getRegionA() .equals(other.getRegionA()); } result = result && (hasRegionB() == other.hasRegionB()); if (hasRegionB()) { result = result && getRegionB() .equals(other.getRegionB()); } result = result && (hasForcible() == other.hasForcible()); if (hasForcible()) { result = result && (getForcible() == other.getForcible()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRegionA()) { hash = (37 * hash) + REGION_A_FIELD_NUMBER; hash = (53 * hash) + getRegionA().hashCode(); } if (hasRegionB()) { hash = (37 * hash) + REGION_B_FIELD_NUMBER; hash = (53 * hash) + getRegionB().hashCode(); } if (hasForcible()) { hash = (37 * hash) + FORCIBLE_FIELD_NUMBER; hash = (53 * hash) + hashBoolean(getForcible()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest 
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code DispatchMergingRegionsRequest} * * <pre> ** * Dispatch merging the specified regions. 
* </pre> */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DispatchMergingRegionsRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DispatchMergingRegionsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getRegionAFieldBuilder(); getRegionBFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (regionABuilder_ == null) { regionA_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); } else { regionABuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (regionBBuilder_ == null) { regionB_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); } else { regionBBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); forcible_ = false; bitField0_ = (bitField0_ & ~0x00000004); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DispatchMergingRegionsRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (regionABuilder_ == null) { result.regionA_ = regionA_; } else { result.regionA_ = regionABuilder_.build(); } if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } if (regionBBuilder_ == null) { result.regionB_ = regionB_; } else { result.regionB_ = regionBBuilder_.build(); } if (((from_bitField0_ & 
0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.forcible_ = forcible_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest.getDefaultInstance()) return this; if (other.hasRegionA()) { mergeRegionA(other.getRegionA()); } if (other.hasRegionB()) { mergeRegionB(other.getRegionB()); } if (other.hasForcible()) { setForcible(other.getForcible()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasRegionA()) { return false; } if (!hasRegionB()) { return false; } if (!getRegionA().isInitialized()) { return false; } if (!getRegionB().isInitialized()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required .RegionSpecifier region_a = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier regionA_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionABuilder_; /** * <code>required .RegionSpecifier region_a = 1;</code> */ public boolean hasRegionA() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .RegionSpecifier region_a = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionA() { if (regionABuilder_ == null) { return regionA_; } else { return regionABuilder_.getMessage(); } } /** * <code>required .RegionSpecifier region_a = 1;</code> */ public Builder setRegionA(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionABuilder_ == null) { if (value == null) { throw new NullPointerException(); } regionA_ = value; onChanged(); } else { regionABuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .RegionSpecifier region_a = 1;</code> */ public Builder setRegionA( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { if (regionABuilder_ == null) { regionA_ = builderForValue.build(); onChanged(); } else { regionABuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** 
* <code>required .RegionSpecifier region_a = 1;</code> */ public Builder mergeRegionA(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionABuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && regionA_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { regionA_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(regionA_).mergeFrom(value).buildPartial(); } else { regionA_ = value; } onChanged(); } else { regionABuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .RegionSpecifier region_a = 1;</code> */ public Builder clearRegionA() { if (regionABuilder_ == null) { regionA_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); onChanged(); } else { regionABuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>required .RegionSpecifier region_a = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionABuilder() { bitField0_ |= 0x00000001; onChanged(); return getRegionAFieldBuilder().getBuilder(); } /** * <code>required .RegionSpecifier region_a = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionAOrBuilder() { if (regionABuilder_ != null) { return regionABuilder_.getMessageOrBuilder(); } else { return regionA_; } } /** * <code>required .RegionSpecifier region_a = 1;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionAFieldBuilder() { if (regionABuilder_ == null) { regionABuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( regionA_, getParentForChildren(), isClean()); regionA_ = null; } return regionABuilder_; } // required .RegionSpecifier region_b = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier regionB_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBBuilder_; /** * <code>required .RegionSpecifier region_b = 2;</code> */ public boolean hasRegionB() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required .RegionSpecifier region_b = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionB() { if (regionBBuilder_ == null) { return regionB_; } else { return regionBBuilder_.getMessage(); } } /** * <code>required .RegionSpecifier region_b = 2;</code> */ public Builder setRegionB(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBBuilder_ == null) { if (value == null) { throw new NullPointerException(); } regionB_ = value; onChanged(); } else { regionBBuilder_.setMessage(value); } bitField0_ |= 
0x00000002; return this; } /** * <code>required .RegionSpecifier region_b = 2;</code> */ public Builder setRegionB( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { if (regionBBuilder_ == null) { regionB_ = builderForValue.build(); onChanged(); } else { regionBBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } /** * <code>required .RegionSpecifier region_b = 2;</code> */ public Builder mergeRegionB(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && regionB_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { regionB_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(regionB_).mergeFrom(value).buildPartial(); } else { regionB_ = value; } onChanged(); } else { regionBBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } /** * <code>required .RegionSpecifier region_b = 2;</code> */ public Builder clearRegionB() { if (regionBBuilder_ == null) { regionB_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); onChanged(); } else { regionBBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } /** * <code>required .RegionSpecifier region_b = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBBuilder() { bitField0_ |= 0x00000002; onChanged(); return getRegionBFieldBuilder().getBuilder(); } /** * <code>required .RegionSpecifier region_b = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionBOrBuilder() { if (regionBBuilder_ != null) { return regionBBuilder_.getMessageOrBuilder(); } else { return regionB_; } } /** * <code>required .RegionSpecifier region_b = 2;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionBFieldBuilder() { if (regionBBuilder_ == null) { regionBBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( regionB_, getParentForChildren(), isClean()); regionB_ = null; } return regionBBuilder_; } // optional bool forcible = 3 [default = false]; private boolean forcible_ ; /** * <code>optional bool forcible = 3 [default = false];</code> */ public boolean hasForcible() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional bool forcible = 3 [default = false];</code> */ public boolean getForcible() { return forcible_; } /** * <code>optional bool forcible = 3 [default = false];</code> */ public Builder setForcible(boolean value) { bitField0_ |= 0x00000004; forcible_ = value; onChanged(); return this; } /** * <code>optional bool forcible = 3 [default = false];</code> */ public Builder clearForcible() { bitField0_ = (bitField0_ & ~0x00000004); forcible_ = false; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:DispatchMergingRegionsRequest) } static { defaultInstance = new DispatchMergingRegionsRequest(true); 
defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:DispatchMergingRegionsRequest) } public interface DispatchMergingRegionsResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code DispatchMergingRegionsResponse} */ public static final class DispatchMergingRegionsResponse extends com.google.protobuf.GeneratedMessage implements DispatchMergingRegionsResponseOrBuilder { // Use DispatchMergingRegionsResponse.newBuilder() to construct. private DispatchMergingRegionsResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private DispatchMergingRegionsResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final DispatchMergingRegionsResponse defaultInstance; public static DispatchMergingRegionsResponse getDefaultInstance() { return defaultInstance; } public DispatchMergingRegionsResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private DispatchMergingRegionsResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DispatchMergingRegionsResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DispatchMergingRegionsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse.Builder.class); } public static com.google.protobuf.Parser<DispatchMergingRegionsResponse> PARSER = new com.google.protobuf.AbstractParser<DispatchMergingRegionsResponse>() { public DispatchMergingRegionsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new DispatchMergingRegionsResponse(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<DispatchMergingRegionsResponse> getParserForType() { return PARSER; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return 
isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, 
extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code DispatchMergingRegionsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DispatchMergingRegionsResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DispatchMergingRegionsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DispatchMergingRegionsResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse buildPartial() { 
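/*
 * Illustrative sketch: the parseFrom/parseDelimitedFrom family above follows the
 * standard protobuf 2.x surface. A length-prefixed round trip over byte streams
 * might look like this (the stream variable names are assumptions for the example):
 *
 *   java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
 *   DispatchMergingRegionsResponse.getDefaultInstance().writeDelimitedTo(out); // varint length prefix, then body
 *   java.io.ByteArrayInputStream in = new java.io.ByteArrayInputStream(out.toByteArray());
 *   DispatchMergingRegionsResponse roundTripped =
 *       DispatchMergingRegionsResponse.parseDelimitedFrom(in);
 */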
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } // @@protoc_insertion_point(builder_scope:DispatchMergingRegionsResponse) } static { defaultInstance = new DispatchMergingRegionsResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:DispatchMergingRegionsResponse) } public interface AssignRegionRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .RegionSpecifier region = 1; /** * <code>required .RegionSpecifier region = 1;</code> */ boolean hasRegion(); /** * <code>required .RegionSpecifier region = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); /** * <code>required .RegionSpecifier region = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); } /** * Protobuf type {@code AssignRegionRequest} */ public static final class AssignRegionRequest extends com.google.protobuf.GeneratedMessage implements AssignRegionRequestOrBuilder { // Use AssignRegionRequest.newBuilder() to construct. 
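/*
 * Illustrative sketch: constructing an AssignRegionRequest. The region value is a
 * placeholder assumption; a REGION_NAME specifier (the full region name bytes)
 * would work as well as ENCODED_REGION_NAME.
 *
 *   AssignRegionRequest request = AssignRegionRequest.newBuilder()
 *       .setRegion(HBaseProtos.RegionSpecifier.newBuilder()
 *           .setType(HBaseProtos.RegionSpecifier.RegionSpecifierType.ENCODED_REGION_NAME)
 *           .setValue(com.google.protobuf.ByteString.copyFromUtf8("placeholderEncodedName"))
 *           .build())
 *       .build();   // build() throws UninitializedMessageException if region is unset
 */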
private AssignRegionRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private AssignRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final AssignRegionRequest defaultInstance; public static AssignRegionRequest getDefaultInstance() { return defaultInstance; } public AssignRegionRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private AssignRegionRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = region_.toBuilder(); } region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(region_); region_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AssignRegionRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AssignRegionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest.Builder.class); } public static com.google.protobuf.Parser<AssignRegionRequest> PARSER = new com.google.protobuf.AbstractParser<AssignRegionRequest>() { public AssignRegionRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new AssignRegionRequest(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<AssignRegionRequest> getParserForType() { return PARSER; } private int bitField0_; // required .RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; /** * <code>required .RegionSpecifier region = 1;</code> */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required 
.RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { return region_; } /** * <code>required .RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { return region_; } private void initFields() { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasRegion()) { memoizedIsInitialized = 0; return false; } if (!getRegion().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, region_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, region_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest) obj; boolean result = true; result = result && (hasRegion() == other.hasRegion()); if (hasRegion()) { result = result && getRegion() .equals(other.getRegion()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRegion()) { hash = (37 * hash) + REGION_FIELD_NUMBER; hash = (53 * hash) + getRegion().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest parseFrom( byte[] data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code AssignRegionRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AssignRegionRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AssignRegionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getRegionFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); 
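// Resets the region field to its default instance (or clears the nested field
// builder) and drops its presence bit below, returning this Builder to the
// state of a fresh newBuilder().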
if (regionBuilder_ == null) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); } else { regionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AssignRegionRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (regionBuilder_ == null) { result.region_ = region_; } else { result.region_ = regionBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest.getDefaultInstance()) return this; if (other.hasRegion()) { mergeRegion(other.getRegion()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasRegion()) { return false; } if (!getRegion().isInitialized()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required .RegionSpecifier region = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; /** * 
<code>required .RegionSpecifier region = 1;</code> */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { return region_; } else { return regionBuilder_.getMessage(); } } /** * <code>required .RegionSpecifier region = 1;</code> */ public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } region_ = value; onChanged(); } else { regionBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .RegionSpecifier region = 1;</code> */ public Builder setRegion( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { if (regionBuilder_ == null) { region_ = builderForValue.build(); onChanged(); } else { regionBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>required .RegionSpecifier region = 1;</code> */ public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); } else { region_ = value; } onChanged(); } else { regionBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .RegionSpecifier region = 1;</code> */ public Builder clearRegion() { if (regionBuilder_ == null) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); onChanged(); } else { regionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>required .RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { bitField0_ |= 0x00000001; onChanged(); return getRegionFieldBuilder().getBuilder(); } /** * <code>required .RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); } else { return region_; } } /** * <code>required .RegionSpecifier region = 1;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { if (regionBuilder_ == null) { regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( region_, getParentForChildren(), isClean()); region_ = null; } return regionBuilder_; } // @@protoc_insertion_point(builder_scope:AssignRegionRequest) } static { defaultInstance = new AssignRegionRequest(true); defaultInstance.initFields(); } // 
// @@protoc_insertion_point(class_scope:AssignRegionRequest)
} public interface AssignRegionResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code AssignRegionResponse} */ public static final class AssignRegionResponse extends com.google.protobuf.GeneratedMessage implements AssignRegionResponseOrBuilder { // Use AssignRegionResponse.newBuilder() to construct. private AssignRegionResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private AssignRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final AssignRegionResponse defaultInstance; public static AssignRegionResponse getDefaultInstance() { return defaultInstance; } public AssignRegionResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private AssignRegionResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AssignRegionResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AssignRegionResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse.Builder.class); } public static com.google.protobuf.Parser<AssignRegionResponse> PARSER = new com.google.protobuf.AbstractParser<AssignRegionResponse>() { public AssignRegionResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new AssignRegionResponse(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<AssignRegionResponse> getParserForType() { return PARSER; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize
= -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse parseFrom( com.google.protobuf.CodedInputStream 
input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code AssignRegionResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AssignRegionResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AssignRegionResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AssignRegionResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse)other); } else { super.mergeFrom(other); return this; } } public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } // @@protoc_insertion_point(builder_scope:AssignRegionResponse) } static { defaultInstance = new AssignRegionResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:AssignRegionResponse) } public interface UnassignRegionRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .RegionSpecifier region = 1; /** * <code>required .RegionSpecifier region = 1;</code> */ boolean hasRegion(); /** * <code>required .RegionSpecifier region = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); /** * <code>required .RegionSpecifier region = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); // optional bool force = 2 [default = false]; /** * <code>optional bool force = 2 [default = false];</code> */ boolean hasForce(); /** * <code>optional bool force = 2 [default = false];</code> */ boolean getForce(); } /** * Protobuf type {@code UnassignRegionRequest} */ public static final class UnassignRegionRequest extends com.google.protobuf.GeneratedMessage implements UnassignRegionRequestOrBuilder { // Use UnassignRegionRequest.newBuilder() to construct. 
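/*
 * Illustrative sketch: an unassign request carrying the optional force flag. If
 * setForce() is never called, hasForce() stays false and getForce() returns the
 * declared default (false). The region variable is assumed built beforehand.
 *
 *   UnassignRegionRequest request = UnassignRegionRequest.newBuilder()
 *       .setRegion(region)   // required .RegionSpecifier region = 1
 *       .setForce(true)      // optional bool force = 2 [default = false]
 *       .build();
 */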
private UnassignRegionRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private UnassignRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final UnassignRegionRequest defaultInstance; public static UnassignRegionRequest getDefaultInstance() { return defaultInstance; } public UnassignRegionRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private UnassignRegionRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = region_.toBuilder(); } region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(region_); region_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 16: { bitField0_ |= 0x00000002; force_ = input.readBool(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_UnassignRegionRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_UnassignRegionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest.Builder.class); } public static com.google.protobuf.Parser<UnassignRegionRequest> PARSER = new com.google.protobuf.AbstractParser<UnassignRegionRequest>() { public UnassignRegionRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new UnassignRegionRequest(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<UnassignRegionRequest> getParserForType() { return PARSER; } private int bitField0_; // required .RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; /** * <code>required .RegionSpecifier region = 1;</code> */ 
public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { return region_; } /** * <code>required .RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { return region_; } // optional bool force = 2 [default = false]; public static final int FORCE_FIELD_NUMBER = 2; private boolean force_; /** * <code>optional bool force = 2 [default = false];</code> */ public boolean hasForce() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional bool force = 2 [default = false];</code> */ public boolean getForce() { return force_; } private void initFields() { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); force_ = false; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasRegion()) { memoizedIsInitialized = 0; return false; } if (!getRegion().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, region_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBool(2, force_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, region_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(2, force_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest) obj; boolean result = true; result = result && (hasRegion() == other.hasRegion()); if (hasRegion()) { result = result && getRegion() .equals(other.getRegion()); } result = result && (hasForce() == other.hasForce()); if (hasForce()) { result = result && (getForce() == other.getForce()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRegion()) { hash = (37 * hash) + REGION_FIELD_NUMBER; hash = (53 * hash) + getRegion().hashCode(); } if (hasForce()) { hash = (37 * hash) + FORCE_FIELD_NUMBER; hash = (53 * hash) + hashBoolean(getForce()); } hash = (29 * hash) + 
getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code UnassignRegionRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_UnassignRegionRequest_descriptor; } 
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_UnassignRegionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getRegionFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (regionBuilder_ == null) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); } else { regionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); force_ = false; bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_UnassignRegionRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (regionBuilder_ == null) { result.region_ = region_; } else { result.region_ = regionBuilder_.build(); } if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.force_ = force_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest.getDefaultInstance()) return this; if (other.hasRegion()) { mergeRegion(other.getRegion()); } if (other.hasForce()) { setForce(other.getForce()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasRegion()) { return false; } if 
(!getRegion().isInitialized()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required .RegionSpecifier region = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; /** * <code>required .RegionSpecifier region = 1;</code> */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { return region_; } else { return regionBuilder_.getMessage(); } } /** * <code>required .RegionSpecifier region = 1;</code> */ public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } region_ = value; onChanged(); } else { regionBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .RegionSpecifier region = 1;</code> */ public Builder setRegion( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { if (regionBuilder_ == null) { region_ = builderForValue.build(); onChanged(); } else { regionBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>required .RegionSpecifier region = 1;</code> */ public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); } else { region_ = value; } onChanged(); } else { regionBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .RegionSpecifier region = 1;</code> */ public Builder clearRegion() { if (regionBuilder_ == null) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); onChanged(); } else { regionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>required .RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { bitField0_ |= 0x00000001; onChanged(); return getRegionFieldBuilder().getBuilder(); } /** * 
<code>required .RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); } else { return region_; } } /** * <code>required .RegionSpecifier region = 1;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { if (regionBuilder_ == null) { regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( region_, getParentForChildren(), isClean()); region_ = null; } return regionBuilder_; } // optional bool force = 2 [default = false]; private boolean force_ ; /** * <code>optional bool force = 2 [default = false];</code> */ public boolean hasForce() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional bool force = 2 [default = false];</code> */ public boolean getForce() { return force_; } /** * <code>optional bool force = 2 [default = false];</code> */ public Builder setForce(boolean value) { bitField0_ |= 0x00000002; force_ = value; onChanged(); return this; } /** * <code>optional bool force = 2 [default = false];</code> */ public Builder clearForce() { bitField0_ = (bitField0_ & ~0x00000002); force_ = false; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:UnassignRegionRequest) } static { defaultInstance = new UnassignRegionRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:UnassignRegionRequest) } public interface UnassignRegionResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code UnassignRegionResponse} */ public static final class UnassignRegionResponse extends com.google.protobuf.GeneratedMessage implements UnassignRegionResponseOrBuilder { // Use UnassignRegionResponse.newBuilder() to construct. 
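// UnassignRegionResponse declares no fields: it serves as an empty
// acknowledgement for the Master's UnassignRegion RPC. A minimal sketch of
// building the matching request follows; it assumes the RegionSpecifier
// message from HBaseProtos (required type, required value), and encodedName
// stands for an illustrative String computed elsewhere:
//
//   UnassignRegionRequest req = UnassignRegionRequest.newBuilder()
//       .setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder()
//           .setType(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.ENCODED_REGION_NAME)
//           .setValue(com.google.protobuf.ByteString.copyFromUtf8(encodedName))
//           .build())
//       .setForce(false)   // optional bool force = 2 [default = false]
//       .build();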
private UnassignRegionResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private UnassignRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final UnassignRegionResponse defaultInstance; public static UnassignRegionResponse getDefaultInstance() { return defaultInstance; } public UnassignRegionResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private UnassignRegionResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_UnassignRegionResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_UnassignRegionResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse.Builder.class); } public static com.google.protobuf.Parser<UnassignRegionResponse> PARSER = new com.google.protobuf.AbstractParser<UnassignRegionResponse>() { public UnassignRegionResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new UnassignRegionResponse(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<UnassignRegionResponse> getParserForType() { return PARSER; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return 
super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code UnassignRegionResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_UnassignRegionResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_UnassignRegionResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_UnassignRegionResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { 
return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } // @@protoc_insertion_point(builder_scope:UnassignRegionResponse) } static { defaultInstance = new UnassignRegionResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:UnassignRegionResponse) } public interface OfflineRegionRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .RegionSpecifier region = 1; /** * <code>required .RegionSpecifier region = 1;</code> */ boolean hasRegion(); /** * <code>required .RegionSpecifier region = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); /** * <code>required .RegionSpecifier region = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); } /** * Protobuf type {@code OfflineRegionRequest} */ public static final class OfflineRegionRequest extends com.google.protobuf.GeneratedMessage implements OfflineRegionRequestOrBuilder { // Use OfflineRegionRequest.newBuilder() to construct. private OfflineRegionRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private OfflineRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final OfflineRegionRequest defaultInstance; public static OfflineRegionRequest getDefaultInstance() { return defaultInstance; } public OfflineRegionRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private OfflineRegionRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = region_.toBuilder(); } region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(region_); region_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( 
e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_OfflineRegionRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_OfflineRegionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest.Builder.class); } public static com.google.protobuf.Parser<OfflineRegionRequest> PARSER = new com.google.protobuf.AbstractParser<OfflineRegionRequest>() { public OfflineRegionRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new OfflineRegionRequest(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<OfflineRegionRequest> getParserForType() { return PARSER; } private int bitField0_; // required .RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; /** * <code>required .RegionSpecifier region = 1;</code> */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { return region_; } /** * <code>required .RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { return region_; } private void initFields() { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasRegion()) { memoizedIsInitialized = 0; return false; } if (!getRegion().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, region_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, region_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest)) { return super.equals(obj); } 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest) obj; boolean result = true; result = result && (hasRegion() == other.hasRegion()); if (hasRegion()) { result = result && getRegion() .equals(other.getRegion()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRegion()) { hash = (37 * hash) + REGION_FIELD_NUMBER; hash = (53 * hash) + getRegion().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest 
prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code OfflineRegionRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_OfflineRegionRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_OfflineRegionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getRegionFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (regionBuilder_ == null) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); } else { regionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_OfflineRegionRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (regionBuilder_ == null) { result.region_ = region_; } else { result.region_ = regionBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest)other); 
} else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest.getDefaultInstance()) return this; if (other.hasRegion()) { mergeRegion(other.getRegion()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasRegion()) { return false; } if (!getRegion().isInitialized()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required .RegionSpecifier region = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; /** * <code>required .RegionSpecifier region = 1;</code> */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { return region_; } else { return regionBuilder_.getMessage(); } } /** * <code>required .RegionSpecifier region = 1;</code> */ public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } region_ = value; onChanged(); } else { regionBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .RegionSpecifier region = 1;</code> */ public Builder setRegion( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { if (regionBuilder_ == null) { region_ = builderForValue.build(); onChanged(); } else { regionBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>required .RegionSpecifier region = 1;</code> */ public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); } else { region_ = value; } onChanged(); } else { regionBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .RegionSpecifier region = 1;</code> */ public Builder clearRegion() { if (regionBuilder_ 
== null) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); onChanged(); } else { regionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>required .RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { bitField0_ |= 0x00000001; onChanged(); return getRegionFieldBuilder().getBuilder(); } /** * <code>required .RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); } else { return region_; } } /** * <code>required .RegionSpecifier region = 1;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { if (regionBuilder_ == null) { regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( region_, getParentForChildren(), isClean()); region_ = null; } return regionBuilder_; } // @@protoc_insertion_point(builder_scope:OfflineRegionRequest) } static { defaultInstance = new OfflineRegionRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:OfflineRegionRequest) } public interface OfflineRegionResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code OfflineRegionResponse} */ public static final class OfflineRegionResponse extends com.google.protobuf.GeneratedMessage implements OfflineRegionResponseOrBuilder { // Use OfflineRegionResponse.newBuilder() to construct. 
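// OfflineRegionResponse, like UnassignRegionResponse above, is an empty
// acknowledgement. A minimal sketch of the matching request; regionName is
// an illustrative byte[] and the RegionSpecifier shape is assumed to match
// HBaseProtos (required type, required value):
//
//   OfflineRegionRequest req = OfflineRegionRequest.newBuilder()
//       .setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder()
//           .setType(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME)
//           .setValue(com.google.protobuf.ByteString.copyFrom(regionName))
//           .build())
//       .build();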
private OfflineRegionResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private OfflineRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final OfflineRegionResponse defaultInstance; public static OfflineRegionResponse getDefaultInstance() { return defaultInstance; } public OfflineRegionResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private OfflineRegionResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_OfflineRegionResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_OfflineRegionResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse.Builder.class); } public static com.google.protobuf.Parser<OfflineRegionResponse> PARSER = new com.google.protobuf.AbstractParser<OfflineRegionResponse>() { public OfflineRegionResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new OfflineRegionResponse(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<OfflineRegionResponse> getParserForType() { return PARSER; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return 
super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code OfflineRegionResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_OfflineRegionResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_OfflineRegionResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_OfflineRegionResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } 
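// With no declared fields, isInitialized() above is trivially true, clear()
// only delegates to super.clear(), and buildPartial() just calls onBuilt();
// the Builder exists so that field-less messages plug into the same
// generated RPC machinery as every other message type.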
public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } // @@protoc_insertion_point(builder_scope:OfflineRegionResponse) } static { defaultInstance = new OfflineRegionResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:OfflineRegionResponse) } public interface CreateTableRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .TableSchema table_schema = 1; /** * <code>required .TableSchema table_schema = 1;</code> */ boolean hasTableSchema(); /** * <code>required .TableSchema table_schema = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema(); /** * <code>required .TableSchema table_schema = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder(); // repeated bytes split_keys = 2; /** * <code>repeated bytes split_keys = 2;</code> */ java.util.List<com.google.protobuf.ByteString> getSplitKeysList(); /** * <code>repeated bytes split_keys = 2;</code> */ int getSplitKeysCount(); /** * <code>repeated bytes split_keys = 2;</code> */ com.google.protobuf.ByteString getSplitKeys(int index); } /** * Protobuf type {@code CreateTableRequest} */ public static final class CreateTableRequest extends com.google.protobuf.GeneratedMessage implements CreateTableRequestOrBuilder { // Use CreateTableRequest.newBuilder() to construct. 
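// A minimal sketch of building a CreateTableRequest with pre-split regions.
// tableSchema stands for an HBaseProtos.TableSchema assembled elsewhere, and
// the two split points are illustrative row keys; addSplitKeys is the
// generated accessor for the repeated bytes split_keys field declared in the
// interface above:
//
//   CreateTableRequest req = CreateTableRequest.newBuilder()
//       .setTableSchema(tableSchema)
//       .addSplitKeys(com.google.protobuf.ByteString.copyFromUtf8("row-5000"))
//       .addSplitKeys(com.google.protobuf.ByteString.copyFromUtf8("row-9000"))
//       .build();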
private CreateTableRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private CreateTableRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final CreateTableRequest defaultInstance; public static CreateTableRequest getDefaultInstance() { return defaultInstance; } public CreateTableRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private CreateTableRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = tableSchema_.toBuilder(); } tableSchema_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(tableSchema_); tableSchema_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 18: { if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { splitKeys_ = new java.util.ArrayList<com.google.protobuf.ByteString>(); mutable_bitField0_ |= 0x00000002; } splitKeys_.add(input.readBytes()); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { splitKeys_ = java.util.Collections.unmodifiableList(splitKeys_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateTableRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateTableRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest.Builder.class); } public static com.google.protobuf.Parser<CreateTableRequest> PARSER = new com.google.protobuf.AbstractParser<CreateTableRequest>() { public CreateTableRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new CreateTableRequest(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<CreateTableRequest> getParserForType() { return PARSER; } private int bitField0_; // 
required .TableSchema table_schema = 1; public static final int TABLE_SCHEMA_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema tableSchema_; /** * <code>required .TableSchema table_schema = 1;</code> */ public boolean hasTableSchema() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .TableSchema table_schema = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema() { return tableSchema_; } /** * <code>required .TableSchema table_schema = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder() { return tableSchema_; } // repeated bytes split_keys = 2; public static final int SPLIT_KEYS_FIELD_NUMBER = 2; private java.util.List<com.google.protobuf.ByteString> splitKeys_; /** * <code>repeated bytes split_keys = 2;</code> */ public java.util.List<com.google.protobuf.ByteString> getSplitKeysList() { return splitKeys_; } /** * <code>repeated bytes split_keys = 2;</code> */ public int getSplitKeysCount() { return splitKeys_.size(); } /** * <code>repeated bytes split_keys = 2;</code> */ public com.google.protobuf.ByteString getSplitKeys(int index) { return splitKeys_.get(index); } private void initFields() { tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); splitKeys_ = java.util.Collections.emptyList(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasTableSchema()) { memoizedIsInitialized = 0; return false; } if (!getTableSchema().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, tableSchema_); } for (int i = 0; i < splitKeys_.size(); i++) { output.writeBytes(2, splitKeys_.get(i)); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, tableSchema_); } { int dataSize = 0; for (int i = 0; i < splitKeys_.size(); i++) { dataSize += com.google.protobuf.CodedOutputStream .computeBytesSizeNoTag(splitKeys_.get(i)); } size += dataSize; size += 1 * getSplitKeysList().size(); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest) obj; boolean result = true; result = result && (hasTableSchema() == other.hasTableSchema()); if (hasTableSchema()) { result = result && getTableSchema() .equals(other.getTableSchema()); } result = result && getSplitKeysList() 
.equals(other.getSplitKeysList()); result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasTableSchema()) { hash = (37 * hash) + TABLE_SCHEMA_FIELD_NUMBER; hash = (53 * hash) + getTableSchema().hashCode(); } if (getSplitKeysCount() > 0) { hash = (37 * hash) + SPLIT_KEYS_FIELD_NUMBER; hash = (53 * hash) + getSplitKeysList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder 
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code CreateTableRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateTableRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateTableRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getTableSchemaFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (tableSchemaBuilder_ == null) { tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); } else { tableSchemaBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); splitKeys_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateTableRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (tableSchemaBuilder_ == null) { result.tableSchema_ = tableSchema_; } else { result.tableSchema_ = tableSchemaBuilder_.build(); } if (((bitField0_ & 0x00000002) == 0x00000002)) { splitKeys_ = java.util.Collections.unmodifiableList(splitKeys_); bitField0_ = (bitField0_ & ~0x00000002); } result.splitKeys_ = splitKeys_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest.getDefaultInstance()) return this; if (other.hasTableSchema()) { mergeTableSchema(other.getTableSchema()); } if (!other.splitKeys_.isEmpty()) { if (splitKeys_.isEmpty()) { splitKeys_ = other.splitKeys_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureSplitKeysIsMutable(); splitKeys_.addAll(other.splitKeys_); } onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasTableSchema()) { return false; } if (!getTableSchema().isInitialized()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required .TableSchema table_schema = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> tableSchemaBuilder_; /** * <code>required .TableSchema table_schema = 1;</code> */ public boolean hasTableSchema() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .TableSchema table_schema = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema() { if (tableSchemaBuilder_ == null) { return tableSchema_; } else { return tableSchemaBuilder_.getMessage(); } } /** * <code>required .TableSchema table_schema = 1;</code> */ public Builder setTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) { if (tableSchemaBuilder_ == null) { if (value == null) { throw new NullPointerException(); } tableSchema_ = value; onChanged(); } else { tableSchemaBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .TableSchema table_schema = 1;</code> */ public Builder setTableSchema( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) { if (tableSchemaBuilder_ == null) { tableSchema_ = builderForValue.build(); onChanged(); } else { tableSchemaBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>required .TableSchema table_schema = 1;</code> */ public Builder mergeTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) { if (tableSchemaBuilder_ == null) { if (((bitField0_ & 0x00000001) == 
0x00000001) && tableSchema_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()) { tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.newBuilder(tableSchema_).mergeFrom(value).buildPartial(); } else { tableSchema_ = value; } onChanged(); } else { tableSchemaBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .TableSchema table_schema = 1;</code> */ public Builder clearTableSchema() { if (tableSchemaBuilder_ == null) { tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); onChanged(); } else { tableSchemaBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>required .TableSchema table_schema = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder getTableSchemaBuilder() { bitField0_ |= 0x00000001; onChanged(); return getTableSchemaFieldBuilder().getBuilder(); } /** * <code>required .TableSchema table_schema = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder() { if (tableSchemaBuilder_ != null) { return tableSchemaBuilder_.getMessageOrBuilder(); } else { return tableSchema_; } } /** * <code>required .TableSchema table_schema = 1;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> getTableSchemaFieldBuilder() { if (tableSchemaBuilder_ == null) { tableSchemaBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>( tableSchema_, getParentForChildren(), isClean()); tableSchema_ = null; } return tableSchemaBuilder_; } // repeated bytes split_keys = 2; private java.util.List<com.google.protobuf.ByteString> splitKeys_ = java.util.Collections.emptyList(); private void ensureSplitKeysIsMutable() { if (!((bitField0_ & 0x00000002) == 0x00000002)) { splitKeys_ = new java.util.ArrayList<com.google.protobuf.ByteString>(splitKeys_); bitField0_ |= 0x00000002; } } /** * <code>repeated bytes split_keys = 2;</code> */ public java.util.List<com.google.protobuf.ByteString> getSplitKeysList() { return java.util.Collections.unmodifiableList(splitKeys_); } /** * <code>repeated bytes split_keys = 2;</code> */ public int getSplitKeysCount() { return splitKeys_.size(); } /** * <code>repeated bytes split_keys = 2;</code> */ public com.google.protobuf.ByteString getSplitKeys(int index) { return splitKeys_.get(index); } /** * <code>repeated bytes split_keys = 2;</code> */ public Builder setSplitKeys( int index, com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } ensureSplitKeysIsMutable(); splitKeys_.set(index, value); onChanged(); return this; } /** * <code>repeated bytes split_keys = 2;</code> */ public Builder addSplitKeys(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } ensureSplitKeysIsMutable(); splitKeys_.add(value); onChanged(); return this; } /** * <code>repeated bytes split_keys = 2;</code> */ public Builder addAllSplitKeys( java.lang.Iterable<? 
extends com.google.protobuf.ByteString> values) { ensureSplitKeysIsMutable(); super.addAll(values, splitKeys_); onChanged(); return this; } /** * <code>repeated bytes split_keys = 2;</code> */ public Builder clearSplitKeys() { splitKeys_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } // @@protoc_insertion_point(builder_scope:CreateTableRequest) } static { defaultInstance = new CreateTableRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:CreateTableRequest) } public interface CreateTableResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code CreateTableResponse} */ public static final class CreateTableResponse extends com.google.protobuf.GeneratedMessage implements CreateTableResponseOrBuilder { // Use CreateTableResponse.newBuilder() to construct. private CreateTableResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private CreateTableResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final CreateTableResponse defaultInstance; public static CreateTableResponse getDefaultInstance() { return defaultInstance; } public CreateTableResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private CreateTableResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateTableResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateTableResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse.Builder.class); } public static com.google.protobuf.Parser<CreateTableResponse> PARSER = new com.google.protobuf.AbstractParser<CreateTableResponse>() { public CreateTableResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new CreateTableResponse(input, extensionRegistry); } }; @java.lang.Override public 
com.google.protobuf.Parser<CreateTableResponse> getParserForType() { return PARSER; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse parseDelimitedFrom( 
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code CreateTableResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateTableResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateTableResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateTableResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse result = new 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } // @@protoc_insertion_point(builder_scope:CreateTableResponse) } static { defaultInstance = new CreateTableResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:CreateTableResponse) } public interface DeleteTableRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .TableName table_name = 1; /** * <code>required .TableName table_name = 1;</code> */ boolean hasTableName(); /** * <code>required .TableName table_name = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName(); /** * <code>required .TableName table_name = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); } /** * Protobuf type {@code DeleteTableRequest} */ public static final class DeleteTableRequest extends com.google.protobuf.GeneratedMessage implements DeleteTableRequestOrBuilder { // Use DeleteTableRequest.newBuilder() to construct. 
private DeleteTableRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private DeleteTableRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final DeleteTableRequest defaultInstance; public static DeleteTableRequest getDefaultInstance() { return defaultInstance; } public DeleteTableRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private DeleteTableRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = tableName_.toBuilder(); } tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(tableName_); tableName_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteTableRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteTableRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest.Builder.class); } public static com.google.protobuf.Parser<DeleteTableRequest> PARSER = new com.google.protobuf.AbstractParser<DeleteTableRequest>() { public DeleteTableRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new DeleteTableRequest(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<DeleteTableRequest> getParserForType() { return PARSER; } private int bitField0_; // required .TableName table_name = 1; public static final int TABLE_NAME_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_; /** * <code>required .TableName table_name = 1;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .TableName table_name 
= 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() { return tableName_; } /** * <code>required .TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { return tableName_; } private void initFields() { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasTableName()) { memoizedIsInitialized = 0; return false; } if (!getTableName().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, tableName_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, tableName_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest) obj; boolean result = true; result = result && (hasTableName() == other.hasTableName()); if (hasTableName()) { result = result && getTableName() .equals(other.getTableName()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasTableName()) { hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER; hash = (53 * hash) + getTableName().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest parseFrom( byte[] data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code DeleteTableRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteTableRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteTableRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getTableNameFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if 
(tableNameBuilder_ == null) { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); } else { tableNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteTableRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (tableNameBuilder_ == null) { result.tableName_ = tableName_; } else { result.tableName_ = tableNameBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest.getDefaultInstance()) return this; if (other.hasTableName()) { mergeTableName(other.getTableName()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasTableName()) { return false; } if (!getTableName().isInitialized()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required .TableName table_name = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * 
<code>required .TableName table_name = 1;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() { if (tableNameBuilder_ == null) { return tableName_; } else { return tableNameBuilder_.getMessage(); } } /** * <code>required .TableName table_name = 1;</code> */ public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (value == null) { throw new NullPointerException(); } tableName_ = value; onChanged(); } else { tableNameBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .TableName table_name = 1;</code> */ public Builder setTableName( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) { if (tableNameBuilder_ == null) { tableName_ = builderForValue.build(); onChanged(); } else { tableNameBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>required .TableName table_name = 1;</code> */ public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); } else { tableName_ = value; } onChanged(); } else { tableNameBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .TableName table_name = 1;</code> */ public Builder clearTableName() { if (tableNameBuilder_ == null) { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); onChanged(); } else { tableNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>required .TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() { bitField0_ |= 0x00000001; onChanged(); return getTableNameFieldBuilder().getBuilder(); } /** * <code>required .TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { if (tableNameBuilder_ != null) { return tableNameBuilder_.getMessageOrBuilder(); } else { return tableName_; } } /** * <code>required .TableName table_name = 1;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>( tableName_, getParentForChildren(), isClean()); tableName_ = null; } return tableNameBuilder_; } // @@protoc_insertion_point(builder_scope:DeleteTableRequest) } static { defaultInstance = new DeleteTableRequest(true); defaultInstance.initFields(); } // 
@@protoc_insertion_point(class_scope:DeleteTableRequest) } public interface DeleteTableResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code DeleteTableResponse} */ public static final class DeleteTableResponse extends com.google.protobuf.GeneratedMessage implements DeleteTableResponseOrBuilder { // Use DeleteTableResponse.newBuilder() to construct. private DeleteTableResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private DeleteTableResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final DeleteTableResponse defaultInstance; public static DeleteTableResponse getDefaultInstance() { return defaultInstance; } public DeleteTableResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private DeleteTableResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteTableResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteTableResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse.Builder.class); } public static com.google.protobuf.Parser<DeleteTableResponse> PARSER = new com.google.protobuf.AbstractParser<DeleteTableResponse>() { public DeleteTableResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new DeleteTableResponse(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<DeleteTableResponse> getParserForType() { return PARSER; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int 
getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse parseFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code DeleteTableResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteTableResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteTableResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteTableResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse other) { 
if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } // @@protoc_insertion_point(builder_scope:DeleteTableResponse) } static { defaultInstance = new DeleteTableResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:DeleteTableResponse) } public interface TruncateTableRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .TableName tableName = 1; /** * <code>required .TableName tableName = 1;</code> */ boolean hasTableName(); /** * <code>required .TableName tableName = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName(); /** * <code>required .TableName tableName = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); // optional bool preserveSplits = 2 [default = false]; /** * <code>optional bool preserveSplits = 2 [default = false];</code> */ boolean hasPreserveSplits(); /** * <code>optional bool preserveSplits = 2 [default = false];</code> */ boolean getPreserveSplits(); } /** * Protobuf type {@code TruncateTableRequest} */ public static final class TruncateTableRequest extends com.google.protobuf.GeneratedMessage implements TruncateTableRequestOrBuilder { // Use TruncateTableRequest.newBuilder() to construct. 
private TruncateTableRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private TruncateTableRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final TruncateTableRequest defaultInstance; public static TruncateTableRequest getDefaultInstance() { return defaultInstance; } public TruncateTableRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private TruncateTableRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = tableName_.toBuilder(); } tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(tableName_); tableName_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 16: { bitField0_ |= 0x00000002; preserveSplits_ = input.readBool(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_TruncateTableRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_TruncateTableRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest.Builder.class); } public static com.google.protobuf.Parser<TruncateTableRequest> PARSER = new com.google.protobuf.AbstractParser<TruncateTableRequest>() { public TruncateTableRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new TruncateTableRequest(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<TruncateTableRequest> getParserForType() { return PARSER; } private int bitField0_; // required .TableName tableName = 1; public static final int TABLENAME_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_; /** * <code>required .TableName tableName = 1;</code> */ public boolean 
hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .TableName tableName = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() { return tableName_; } /** * <code>required .TableName tableName = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { return tableName_; } // optional bool preserveSplits = 2 [default = false]; public static final int PRESERVESPLITS_FIELD_NUMBER = 2; private boolean preserveSplits_; /** * <code>optional bool preserveSplits = 2 [default = false];</code> */ public boolean hasPreserveSplits() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional bool preserveSplits = 2 [default = false];</code> */ public boolean getPreserveSplits() { return preserveSplits_; } private void initFields() { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); preserveSplits_ = false; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasTableName()) { memoizedIsInitialized = 0; return false; } if (!getTableName().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, tableName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBool(2, preserveSplits_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, tableName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(2, preserveSplits_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest) obj; boolean result = true; result = result && (hasTableName() == other.hasTableName()); if (hasTableName()) { result = result && getTableName() .equals(other.getTableName()); } result = result && (hasPreserveSplits() == other.hasPreserveSplits()); if (hasPreserveSplits()) { result = result && (getPreserveSplits() == other.getPreserveSplits()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasTableName()) { hash = (37 * hash) + TABLENAME_FIELD_NUMBER; hash = (53 * hash) + 
getTableName().hashCode(); } if (hasPreserveSplits()) { hash = (37 * hash) + PRESERVESPLITS_FIELD_NUMBER; hash = (53 * hash) + hashBoolean(getPreserveSplits()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code TruncateTableRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequestOrBuilder { public static final 
com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_TruncateTableRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_TruncateTableRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getTableNameFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (tableNameBuilder_ == null) { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); } else { tableNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); preserveSplits_ = false; bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_TruncateTableRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (tableNameBuilder_ == null) { result.tableName_ = tableName_; } else { result.tableName_ = tableNameBuilder_.build(); } if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.preserveSplits_ = preserveSplits_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest.getDefaultInstance()) return this; if (other.hasTableName()) { 
mergeTableName(other.getTableName()); } if (other.hasPreserveSplits()) { setPreserveSplits(other.getPreserveSplits()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasTableName()) { return false; } if (!getTableName().isInitialized()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required .TableName tableName = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * <code>required .TableName tableName = 1;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .TableName tableName = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() { if (tableNameBuilder_ == null) { return tableName_; } else { return tableNameBuilder_.getMessage(); } } /** * <code>required .TableName tableName = 1;</code> */ public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (value == null) { throw new NullPointerException(); } tableName_ = value; onChanged(); } else { tableNameBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .TableName tableName = 1;</code> */ public Builder setTableName( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) { if (tableNameBuilder_ == null) { tableName_ = builderForValue.build(); onChanged(); } else { tableNameBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>required .TableName tableName = 1;</code> */ public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); } else { tableName_ = value; } onChanged(); } else { tableNameBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .TableName tableName = 1;</code> */ public Builder clearTableName() { if (tableNameBuilder_ == null) { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); onChanged(); } else { tableNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * 
<code>required .TableName tableName = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() { bitField0_ |= 0x00000001; onChanged(); return getTableNameFieldBuilder().getBuilder(); } /** * <code>required .TableName tableName = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { if (tableNameBuilder_ != null) { return tableNameBuilder_.getMessageOrBuilder(); } else { return tableName_; } } /** * <code>required .TableName tableName = 1;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>( tableName_, getParentForChildren(), isClean()); tableName_ = null; } return tableNameBuilder_; } // optional bool preserveSplits = 2 [default = false]; private boolean preserveSplits_ ; /** * <code>optional bool preserveSplits = 2 [default = false];</code> */ public boolean hasPreserveSplits() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional bool preserveSplits = 2 [default = false];</code> */ public boolean getPreserveSplits() { return preserveSplits_; } /** * <code>optional bool preserveSplits = 2 [default = false];</code> */ public Builder setPreserveSplits(boolean value) { bitField0_ |= 0x00000002; preserveSplits_ = value; onChanged(); return this; } /** * <code>optional bool preserveSplits = 2 [default = false];</code> */ public Builder clearPreserveSplits() { bitField0_ = (bitField0_ & ~0x00000002); preserveSplits_ = false; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:TruncateTableRequest) } static { defaultInstance = new TruncateTableRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:TruncateTableRequest) } public interface TruncateTableResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code TruncateTableResponse} */ public static final class TruncateTableResponse extends com.google.protobuf.GeneratedMessage implements TruncateTableResponseOrBuilder { // Use TruncateTableResponse.newBuilder() to construct. 
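  // Illustrative sketch (hand-written, not protoc output): a serialization
  // round-trip for the TruncateTableRequest defined above. toByteArray() is
  // inherited from the protobuf runtime rather than declared in this file;
  // `req` is the hypothetical request from the earlier sketch.
  //
  //   byte[] wire = req.toByteArray();
  //   TruncateTableRequest back = TruncateTableRequest.parseFrom(wire);
  //   assert back.hasPreserveSplits() == req.hasPreserveSplits();
  //   assert back.getTableName().equals(req.getTableName());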
private TruncateTableResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private TruncateTableResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final TruncateTableResponse defaultInstance; public static TruncateTableResponse getDefaultInstance() { return defaultInstance; } public TruncateTableResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private TruncateTableResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_TruncateTableResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_TruncateTableResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse.Builder.class); } public static com.google.protobuf.Parser<TruncateTableResponse> PARSER = new com.google.protobuf.AbstractParser<TruncateTableResponse>() { public TruncateTableResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new TruncateTableResponse(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<TruncateTableResponse> getParserForType() { return PARSER; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return 
super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code TruncateTableResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_TruncateTableResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_TruncateTableResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_TruncateTableResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } 
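  // Note (hand-written): TruncateTableResponse declares no fields, so
  // isInitialized() above is trivially true. Empty responses like this one
  // exist so the Master RPC signature can later gain response fields without
  // breaking wire compatibility; callers typically discard the returned
  // instance.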
public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } // @@protoc_insertion_point(builder_scope:TruncateTableResponse) } static { defaultInstance = new TruncateTableResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:TruncateTableResponse) } public interface EnableTableRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .TableName table_name = 1; /** * <code>required .TableName table_name = 1;</code> */ boolean hasTableName(); /** * <code>required .TableName table_name = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName(); /** * <code>required .TableName table_name = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); } /** * Protobuf type {@code EnableTableRequest} */ public static final class EnableTableRequest extends com.google.protobuf.GeneratedMessage implements EnableTableRequestOrBuilder { // Use EnableTableRequest.newBuilder() to construct. private EnableTableRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private EnableTableRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final EnableTableRequest defaultInstance; public static EnableTableRequest getDefaultInstance() { return defaultInstance; } public EnableTableRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private EnableTableRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = tableName_.toBuilder(); } tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(tableName_); tableName_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { 
this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableTableRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableTableRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest.Builder.class); } public static com.google.protobuf.Parser<EnableTableRequest> PARSER = new com.google.protobuf.AbstractParser<EnableTableRequest>() { public EnableTableRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new EnableTableRequest(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<EnableTableRequest> getParserForType() { return PARSER; } private int bitField0_; // required .TableName table_name = 1; public static final int TABLE_NAME_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_; /** * <code>required .TableName table_name = 1;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() { return tableName_; } /** * <code>required .TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { return tableName_; } private void initFields() { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasTableName()) { memoizedIsInitialized = 0; return false; } if (!getTableName().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, tableName_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, tableName_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest other = 
(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest) obj; boolean result = true; result = result && (hasTableName() == other.hasTableName()); if (hasTableName()) { result = result && getTableName() .equals(other.getTableName()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasTableName()) { hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER; hash = (53 * hash) + getTableName().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() 
{ return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code EnableTableRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableTableRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableTableRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getTableNameFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (tableNameBuilder_ == null) { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); } else { tableNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableTableRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (tableNameBuilder_ == null) { result.tableName_ = tableName_; } else { result.tableName_ = tableNameBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest)other); } else { super.mergeFrom(other); return this; } } public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest.getDefaultInstance()) return this; if (other.hasTableName()) { mergeTableName(other.getTableName()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasTableName()) { return false; } if (!getTableName().isInitialized()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required .TableName table_name = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * <code>required .TableName table_name = 1;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() { if (tableNameBuilder_ == null) { return tableName_; } else { return tableNameBuilder_.getMessage(); } } /** * <code>required .TableName table_name = 1;</code> */ public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (value == null) { throw new NullPointerException(); } tableName_ = value; onChanged(); } else { tableNameBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .TableName table_name = 1;</code> */ public Builder setTableName( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) { if (tableNameBuilder_ == null) { tableName_ = builderForValue.build(); onChanged(); } else { tableNameBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>required .TableName table_name = 1;</code> */ public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); } else { tableName_ = value; } onChanged(); } else { tableNameBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .TableName table_name = 1;</code> */ public Builder clearTableName() { if (tableNameBuilder_ == null) { tableName_ = 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); onChanged(); } else { tableNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>required .TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() { bitField0_ |= 0x00000001; onChanged(); return getTableNameFieldBuilder().getBuilder(); } /** * <code>required .TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { if (tableNameBuilder_ != null) { return tableNameBuilder_.getMessageOrBuilder(); } else { return tableName_; } } /** * <code>required .TableName table_name = 1;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>( tableName_, getParentForChildren(), isClean()); tableName_ = null; } return tableNameBuilder_; } // @@protoc_insertion_point(builder_scope:EnableTableRequest) } static { defaultInstance = new EnableTableRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:EnableTableRequest) } public interface EnableTableResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code EnableTableResponse} */ public static final class EnableTableResponse extends com.google.protobuf.GeneratedMessage implements EnableTableResponseOrBuilder { // Use EnableTableResponse.newBuilder() to construct. 
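  // Illustrative sketch (hand-written, not protoc output): the table_name
  // field of EnableTableRequest above is backed by a SingleFieldBuilder once
  // getTableNameBuilder() is first called; from then on, reads and writes flow
  // through the nested builder instead of the tableName_ field. Assuming
  // TableName carries `namespace` and `qualifier` bytes fields (defined in
  // HBase.proto, not in this file), an in-place edit looks like:
  //
  //   EnableTableRequest.Builder b = EnableTableRequest.newBuilder();
  //   b.getTableNameBuilder()
  //       .setNamespace(ns)     // hypothetical ByteString values
  //       .setQualifier(name);
  //   EnableTableRequest req = b.build();  // throws if table_name is incomplete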
private EnableTableResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private EnableTableResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final EnableTableResponse defaultInstance; public static EnableTableResponse getDefaultInstance() { return defaultInstance; } public EnableTableResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private EnableTableResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableTableResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableTableResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse.Builder.class); } public static com.google.protobuf.Parser<EnableTableResponse> PARSER = new com.google.protobuf.AbstractParser<EnableTableResponse>() { public EnableTableResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new EnableTableResponse(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<EnableTableResponse> getParserForType() { return PARSER; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override 
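  // Note (hand-written): equals() below compares only the unknown-field sets,
  // since EnableTableResponse declares no fields of its own. Messages with
  // fields, such as TruncateTableRequest earlier in this file, first compare
  // field presence (hasXxx) and then values, so an unset optional field never
  // matches a field explicitly set to its default.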
public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse prototype) { return 
newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code EnableTableResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableTableResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableTableResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableTableResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } // @@protoc_insertion_point(builder_scope:EnableTableResponse) } static { defaultInstance = new EnableTableResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:EnableTableResponse) } public interface DisableTableRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .TableName table_name = 1; /** * <code>required .TableName table_name = 1;</code> */ boolean hasTableName(); /** * <code>required .TableName table_name = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName(); /** * <code>required .TableName table_name = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); } /** * Protobuf type {@code DisableTableRequest} */ public static final class DisableTableRequest extends com.google.protobuf.GeneratedMessage implements DisableTableRequestOrBuilder { // Use DisableTableRequest.newBuilder() to construct. private DisableTableRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private DisableTableRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final DisableTableRequest defaultInstance; public static DisableTableRequest getDefaultInstance() { return defaultInstance; } public DisableTableRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private DisableTableRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = tableName_.toBuilder(); } tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(tableName_); tableName_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor 
getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DisableTableRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DisableTableRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest.Builder.class); } public static com.google.protobuf.Parser<DisableTableRequest> PARSER = new com.google.protobuf.AbstractParser<DisableTableRequest>() { public DisableTableRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new DisableTableRequest(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<DisableTableRequest> getParserForType() { return PARSER; } private int bitField0_; // required .TableName table_name = 1; public static final int TABLE_NAME_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_; /** * <code>required .TableName table_name = 1;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() { return tableName_; } /** * <code>required .TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { return tableName_; } private void initFields() { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasTableName()) { memoizedIsInitialized = 0; return false; } if (!getTableName().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, tableName_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, tableName_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest) obj; boolean result = true; result = result && (hasTableName() == 
other.hasTableName()); if (hasTableName()) { result = result && getTableName() .equals(other.getTableName()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasTableName()) { hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER; hash = (53 * hash) + getTableName().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code DisableTableRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DisableTableRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DisableTableRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getTableNameFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (tableNameBuilder_ == null) { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); } else { tableNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DisableTableRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (tableNameBuilder_ == null) { result.tableName_ = tableName_; } else { result.tableName_ = tableNameBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest other) { if 
(other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest.getDefaultInstance()) return this; if (other.hasTableName()) { mergeTableName(other.getTableName()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasTableName()) { return false; } if (!getTableName().isInitialized()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required .TableName table_name = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * <code>required .TableName table_name = 1;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() { if (tableNameBuilder_ == null) { return tableName_; } else { return tableNameBuilder_.getMessage(); } } /** * <code>required .TableName table_name = 1;</code> */ public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (value == null) { throw new NullPointerException(); } tableName_ = value; onChanged(); } else { tableNameBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .TableName table_name = 1;</code> */ public Builder setTableName( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) { if (tableNameBuilder_ == null) { tableName_ = builderForValue.build(); onChanged(); } else { tableNameBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>required .TableName table_name = 1;</code> */ public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); } else { tableName_ = value; } onChanged(); } else { tableNameBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .TableName table_name = 1;</code> */ public Builder clearTableName() { if (tableNameBuilder_ == null) { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); onChanged(); } else { tableNameBuilder_.clear(); 
} bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>required .TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() { bitField0_ |= 0x00000001; onChanged(); return getTableNameFieldBuilder().getBuilder(); } /** * <code>required .TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { if (tableNameBuilder_ != null) { return tableNameBuilder_.getMessageOrBuilder(); } else { return tableName_; } } /** * <code>required .TableName table_name = 1;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>( tableName_, getParentForChildren(), isClean()); tableName_ = null; } return tableNameBuilder_; } // @@protoc_insertion_point(builder_scope:DisableTableRequest) } static { defaultInstance = new DisableTableRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:DisableTableRequest) } public interface DisableTableResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code DisableTableResponse} */ public static final class DisableTableResponse extends com.google.protobuf.GeneratedMessage implements DisableTableResponseOrBuilder { // Use DisableTableResponse.newBuilder() to construct. 
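    // Editorial note (not part of the protoc output): DisableTableResponse is
    // the empty reply half of the disable-table exchange; success is conveyed
    // by the response arriving at all, and errors surface as RPC exceptions.
    // A minimal request-side sketch, assuming HBaseProtos.TableName from
    // HBase.proto carries the bytes fields namespace and qualifier:
    //
    //   DisableTableRequest req = DisableTableRequest.newBuilder()
    //       .setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder()
    //           .setNamespace(com.google.protobuf.ByteString.copyFromUtf8("default"))
    //           .setQualifier(com.google.protobuf.ByteString.copyFromUtf8("my_table")))
    //       .build();                    // build() throws if required table_name is unset
    //   byte[] wire = req.toByteArray(); // serialize for the RPC layer
    //   DisableTableRequest roundTrip = DisableTableRequest.parseFrom(wire);
    //
    // setTableName accepts either a built TableName or, as above, a TableName.Builder.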
private DisableTableResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private DisableTableResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final DisableTableResponse defaultInstance; public static DisableTableResponse getDefaultInstance() { return defaultInstance; } public DisableTableResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private DisableTableResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DisableTableResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DisableTableResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse.Builder.class); } public static com.google.protobuf.Parser<DisableTableResponse> PARSER = new com.google.protobuf.AbstractParser<DisableTableResponse>() { public DisableTableResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new DisableTableResponse(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<DisableTableResponse> getParserForType() { return PARSER; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } 
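    // Editorial note (not part of the protoc output): the static parseFrom /
    // parseDelimitedFrom overloads below all funnel into PARSER. The delimited
    // variants prefix each message with its varint length, which is what lets
    // several messages share one stream; parseDelimitedFrom returns null once
    // the stream is exhausted. A small sketch using only protobuf-java 2.5 APIs:
    //
    //   java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
    //   DisableTableResponse.getDefaultInstance().writeDelimitedTo(out);
    //   DisableTableResponse.getDefaultInstance().writeDelimitedTo(out);
    //   java.io.InputStream in =
    //       new java.io.ByteArrayInputStream(out.toByteArray());
    //   DisableTableResponse first  = DisableTableResponse.parseDelimitedFrom(in);
    //   DisableTableResponse second = DisableTableResponse.parseDelimitedFrom(in);
    //   assert DisableTableResponse.parseDelimitedFrom(in) == null; // stream drained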
@java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse 
prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code DisableTableResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DisableTableResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DisableTableResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DisableTableResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } // @@protoc_insertion_point(builder_scope:DisableTableResponse) } static { defaultInstance = new DisableTableResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:DisableTableResponse) } public interface ModifyTableRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .TableName table_name = 1; /** * <code>required .TableName table_name = 1;</code> */ boolean hasTableName(); /** * <code>required .TableName table_name = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName(); /** * <code>required .TableName table_name = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); // required .TableSchema table_schema = 2; /** * <code>required .TableSchema table_schema = 2;</code> */ boolean hasTableSchema(); /** * <code>required .TableSchema table_schema = 2;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema(); /** * <code>required .TableSchema table_schema = 2;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder(); } /** * Protobuf type {@code ModifyTableRequest} */ public static final class ModifyTableRequest extends com.google.protobuf.GeneratedMessage implements ModifyTableRequestOrBuilder { // Use ModifyTableRequest.newBuilder() to construct. 
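    // Editorial note (not part of the protoc output): ModifyTableRequest pairs
    // two required sub-messages, table_name and table_schema, and
    // isInitialized() checks both recursively before build() will succeed.
    // A hedged construction sketch; the TableSchema / ColumnFamilySchema field
    // names are assumptions taken from HBase.proto, not from this file:
    //
    //   HBaseProtos.TableName name = HBaseProtos.TableName.newBuilder()
    //       .setNamespace(com.google.protobuf.ByteString.copyFromUtf8("default"))
    //       .setQualifier(com.google.protobuf.ByteString.copyFromUtf8("my_table"))
    //       .build();
    //   ModifyTableRequest req = ModifyTableRequest.newBuilder()
    //       .setTableName(name)
    //       .setTableSchema(HBaseProtos.TableSchema.newBuilder()
    //           .setTableName(name)
    //           .addColumnFamilies(HBaseProtos.ColumnFamilySchema.newBuilder()
    //               .setName(com.google.protobuf.ByteString.copyFromUtf8("cf1"))))
    //       .build();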
private ModifyTableRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private ModifyTableRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final ModifyTableRequest defaultInstance; public static ModifyTableRequest getDefaultInstance() { return defaultInstance; } public ModifyTableRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ModifyTableRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = tableName_.toBuilder(); } tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(tableName_); tableName_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 18: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder subBuilder = null; if (((bitField0_ & 0x00000002) == 0x00000002)) { subBuilder = tableSchema_.toBuilder(); } tableSchema_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(tableSchema_); tableSchema_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyTableRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyTableRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest.Builder.class); } public static com.google.protobuf.Parser<ModifyTableRequest> PARSER = new com.google.protobuf.AbstractParser<ModifyTableRequest>() { public ModifyTableRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ModifyTableRequest(input, extensionRegistry); } }; @java.lang.Override public 
com.google.protobuf.Parser<ModifyTableRequest> getParserForType() { return PARSER; } private int bitField0_; // required .TableName table_name = 1; public static final int TABLE_NAME_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_; /** * <code>required .TableName table_name = 1;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() { return tableName_; } /** * <code>required .TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { return tableName_; } // required .TableSchema table_schema = 2; public static final int TABLE_SCHEMA_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema tableSchema_; /** * <code>required .TableSchema table_schema = 2;</code> */ public boolean hasTableSchema() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required .TableSchema table_schema = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema() { return tableSchema_; } /** * <code>required .TableSchema table_schema = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder() { return tableSchema_; } private void initFields() { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasTableName()) { memoizedIsInitialized = 0; return false; } if (!hasTableSchema()) { memoizedIsInitialized = 0; return false; } if (!getTableName().isInitialized()) { memoizedIsInitialized = 0; return false; } if (!getTableSchema().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, tableName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeMessage(2, tableSchema_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, tableName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, tableSchema_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest)) { return super.equals(obj); } 
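    // (editorial comment) The equals() pattern continued below compares field
    // presence first (hasTableName() == other.hasTableName()) and only then the
    // values, so an unset field is never equal to one explicitly set to its
    // default value.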
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest) obj; boolean result = true; result = result && (hasTableName() == other.hasTableName()); if (hasTableName()) { result = result && getTableName() .equals(other.getTableName()); } result = result && (hasTableSchema() == other.hasTableSchema()); if (hasTableSchema()) { result = result && getTableSchema() .equals(other.getTableSchema()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasTableName()) { hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER; hash = (53 * hash) + getTableName().hashCode(); } if (hasTableSchema()) { hash = (37 * hash) + TABLE_SCHEMA_FIELD_NUMBER; hash = (53 * hash) + getTableSchema().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { 
return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code ModifyTableRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyTableRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyTableRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getTableNameFieldBuilder(); getTableSchemaFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (tableNameBuilder_ == null) { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); } else { tableNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (tableSchemaBuilder_ == null) { tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); } else { tableSchemaBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyTableRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if 
(((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (tableNameBuilder_ == null) { result.tableName_ = tableName_; } else { result.tableName_ = tableNameBuilder_.build(); } if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } if (tableSchemaBuilder_ == null) { result.tableSchema_ = tableSchema_; } else { result.tableSchema_ = tableSchemaBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest.getDefaultInstance()) return this; if (other.hasTableName()) { mergeTableName(other.getTableName()); } if (other.hasTableSchema()) { mergeTableSchema(other.getTableSchema()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasTableName()) { return false; } if (!hasTableSchema()) { return false; } if (!getTableName().isInitialized()) { return false; } if (!getTableSchema().isInitialized()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required .TableName table_name = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * <code>required .TableName table_name = 1;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() { if (tableNameBuilder_ == null) { return tableName_; } else { return tableNameBuilder_.getMessage(); } } /** * <code>required .TableName table_name = 1;</code> */ public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (value == null) { throw new NullPointerException(); } tableName_ = value; onChanged(); } else { tableNameBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .TableName table_name = 1;</code> */ public Builder setTableName( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) { 
if (tableNameBuilder_ == null) { tableName_ = builderForValue.build(); onChanged(); } else { tableNameBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>required .TableName table_name = 1;</code> */ public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); } else { tableName_ = value; } onChanged(); } else { tableNameBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .TableName table_name = 1;</code> */ public Builder clearTableName() { if (tableNameBuilder_ == null) { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); onChanged(); } else { tableNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>required .TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() { bitField0_ |= 0x00000001; onChanged(); return getTableNameFieldBuilder().getBuilder(); } /** * <code>required .TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { if (tableNameBuilder_ != null) { return tableNameBuilder_.getMessageOrBuilder(); } else { return tableName_; } } /** * <code>required .TableName table_name = 1;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>( tableName_, getParentForChildren(), isClean()); tableName_ = null; } return tableNameBuilder_; } // required .TableSchema table_schema = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> tableSchemaBuilder_; /** * <code>required .TableSchema table_schema = 2;</code> */ public boolean hasTableSchema() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required .TableSchema table_schema = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema() { if (tableSchemaBuilder_ == null) { return tableSchema_; } else { return tableSchemaBuilder_.getMessage(); } } /** * <code>required .TableSchema table_schema = 2;</code> */ public Builder setTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) { if (tableSchemaBuilder_ == null) { 
if (value == null) { throw new NullPointerException(); } tableSchema_ = value; onChanged(); } else { tableSchemaBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } /** * <code>required .TableSchema table_schema = 2;</code> */ public Builder setTableSchema( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) { if (tableSchemaBuilder_ == null) { tableSchema_ = builderForValue.build(); onChanged(); } else { tableSchemaBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } /** * <code>required .TableSchema table_schema = 2;</code> */ public Builder mergeTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) { if (tableSchemaBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && tableSchema_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()) { tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.newBuilder(tableSchema_).mergeFrom(value).buildPartial(); } else { tableSchema_ = value; } onChanged(); } else { tableSchemaBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } /** * <code>required .TableSchema table_schema = 2;</code> */ public Builder clearTableSchema() { if (tableSchemaBuilder_ == null) { tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); onChanged(); } else { tableSchemaBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } /** * <code>required .TableSchema table_schema = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder getTableSchemaBuilder() { bitField0_ |= 0x00000002; onChanged(); return getTableSchemaFieldBuilder().getBuilder(); } /** * <code>required .TableSchema table_schema = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder() { if (tableSchemaBuilder_ != null) { return tableSchemaBuilder_.getMessageOrBuilder(); } else { return tableSchema_; } } /** * <code>required .TableSchema table_schema = 2;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> getTableSchemaFieldBuilder() { if (tableSchemaBuilder_ == null) { tableSchemaBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>( tableSchema_, getParentForChildren(), isClean()); tableSchema_ = null; } return tableSchemaBuilder_; } // @@protoc_insertion_point(builder_scope:ModifyTableRequest) } static { defaultInstance = new ModifyTableRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:ModifyTableRequest) } public interface ModifyTableResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code ModifyTableResponse} */ public static final class ModifyTableResponse extends com.google.protobuf.GeneratedMessage implements ModifyTableResponseOrBuilder { // Use ModifyTableResponse.newBuilder() to construct. 
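    // Editorial note (not part of the protoc output): like the other *Response
    // messages in this file, ModifyTableResponse declares no fields; keeping an
    // explicit (if empty) response type lets later HBase versions add result
    // fields without changing the RPC signature, and a newer server's extra
    // fields survive in getUnknownFields(). Consequently equality is trivial:
    //
    //   ModifyTableResponse a = ModifyTableResponse.getDefaultInstance();
    //   ModifyTableResponse b = ModifyTableResponse.newBuilder().build();
    //   assert a.equals(b); // no declared fields, so only unknown fields are compared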
private ModifyTableResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private ModifyTableResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final ModifyTableResponse defaultInstance; public static ModifyTableResponse getDefaultInstance() { return defaultInstance; } public ModifyTableResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ModifyTableResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyTableResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyTableResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse.Builder.class); } public static com.google.protobuf.Parser<ModifyTableResponse> PARSER = new com.google.protobuf.AbstractParser<ModifyTableResponse>() { public ModifyTableResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ModifyTableResponse(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<ModifyTableResponse> getParserForType() { return PARSER; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override 
public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse prototype) { return 
newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code ModifyTableResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyTableResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyTableResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyTableResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } // @@protoc_insertion_point(builder_scope:ModifyTableResponse) } static { defaultInstance = new ModifyTableResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:ModifyTableResponse) } public interface CreateNamespaceRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .NamespaceDescriptor namespaceDescriptor = 1; /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ boolean hasNamespaceDescriptor(); /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor getNamespaceDescriptor(); /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder getNamespaceDescriptorOrBuilder(); } /** * Protobuf type {@code CreateNamespaceRequest} */ public static final class CreateNamespaceRequest extends com.google.protobuf.GeneratedMessage implements CreateNamespaceRequestOrBuilder { // Use CreateNamespaceRequest.newBuilder() to construct. private CreateNamespaceRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private CreateNamespaceRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final CreateNamespaceRequest defaultInstance; public static CreateNamespaceRequest getDefaultInstance() { return defaultInstance; } public CreateNamespaceRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private CreateNamespaceRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = namespaceDescriptor_.toBuilder(); } namespaceDescriptor_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(namespaceDescriptor_); namespaceDescriptor_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new 
com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateNamespaceRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateNamespaceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest.Builder.class); } public static com.google.protobuf.Parser<CreateNamespaceRequest> PARSER = new com.google.protobuf.AbstractParser<CreateNamespaceRequest>() { public CreateNamespaceRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new CreateNamespaceRequest(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<CreateNamespaceRequest> getParserForType() { return PARSER; } private int bitField0_; // required .NamespaceDescriptor namespaceDescriptor = 1; public static final int NAMESPACEDESCRIPTOR_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor namespaceDescriptor_; /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public boolean hasNamespaceDescriptor() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor getNamespaceDescriptor() { return namespaceDescriptor_; } /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder getNamespaceDescriptorOrBuilder() { return namespaceDescriptor_; } private void initFields() { namespaceDescriptor_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasNamespaceDescriptor()) { memoizedIsInitialized = 0; return false; } if (!getNamespaceDescriptor().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, namespaceDescriptor_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, namespaceDescriptor_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return 
super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest) obj; boolean result = true; result = result && (hasNamespaceDescriptor() == other.hasNamespaceDescriptor()); if (hasNamespaceDescriptor()) { result = result && getNamespaceDescriptor() .equals(other.getNamespaceDescriptor()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasNamespaceDescriptor()) { hash = (37 * hash) + NAMESPACEDESCRIPTOR_FIELD_NUMBER; hash = (53 * hash) + getNamespaceDescriptor().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest parseFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code CreateNamespaceRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateNamespaceRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateNamespaceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getNamespaceDescriptorFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (namespaceDescriptorBuilder_ == null) { namespaceDescriptor_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); } else { namespaceDescriptorBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateNamespaceRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if 
(((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (namespaceDescriptorBuilder_ == null) { result.namespaceDescriptor_ = namespaceDescriptor_; } else { result.namespaceDescriptor_ = namespaceDescriptorBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest.getDefaultInstance()) return this; if (other.hasNamespaceDescriptor()) { mergeNamespaceDescriptor(other.getNamespaceDescriptor()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasNamespaceDescriptor()) { return false; } if (!getNamespaceDescriptor().isInitialized()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required .NamespaceDescriptor namespaceDescriptor = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor namespaceDescriptor_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder> namespaceDescriptorBuilder_; /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public boolean hasNamespaceDescriptor() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor getNamespaceDescriptor() { if (namespaceDescriptorBuilder_ == null) { return namespaceDescriptor_; } else { return namespaceDescriptorBuilder_.getMessage(); } } /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public Builder setNamespaceDescriptor(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor value) { if (namespaceDescriptorBuilder_ == null) { if (value == null) { throw new NullPointerException(); } namespaceDescriptor_ = value; onChanged(); } else { namespaceDescriptorBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public Builder setNamespaceDescriptor( 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder builderForValue) { if (namespaceDescriptorBuilder_ == null) { namespaceDescriptor_ = builderForValue.build(); onChanged(); } else { namespaceDescriptorBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public Builder mergeNamespaceDescriptor(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor value) { if (namespaceDescriptorBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && namespaceDescriptor_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance()) { namespaceDescriptor_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.newBuilder(namespaceDescriptor_).mergeFrom(value).buildPartial(); } else { namespaceDescriptor_ = value; } onChanged(); } else { namespaceDescriptorBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public Builder clearNamespaceDescriptor() { if (namespaceDescriptorBuilder_ == null) { namespaceDescriptor_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); onChanged(); } else { namespaceDescriptorBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder getNamespaceDescriptorBuilder() { bitField0_ |= 0x00000001; onChanged(); return getNamespaceDescriptorFieldBuilder().getBuilder(); } /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder getNamespaceDescriptorOrBuilder() { if (namespaceDescriptorBuilder_ != null) { return namespaceDescriptorBuilder_.getMessageOrBuilder(); } else { return namespaceDescriptor_; } } /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder> getNamespaceDescriptorFieldBuilder() { if (namespaceDescriptorBuilder_ == null) { namespaceDescriptorBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder>( namespaceDescriptor_, getParentForChildren(), isClean()); namespaceDescriptor_ = null; } return namespaceDescriptorBuilder_; } // @@protoc_insertion_point(builder_scope:CreateNamespaceRequest) } static { defaultInstance = new CreateNamespaceRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:CreateNamespaceRequest) } public interface CreateNamespaceResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code CreateNamespaceResponse} */ public static final class CreateNamespaceResponse extends com.google.protobuf.GeneratedMessage implements CreateNamespaceResponseOrBuilder { // Use 
CreateNamespaceResponse.newBuilder() to construct. private CreateNamespaceResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private CreateNamespaceResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final CreateNamespaceResponse defaultInstance; public static CreateNamespaceResponse getDefaultInstance() { return defaultInstance; } public CreateNamespaceResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private CreateNamespaceResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateNamespaceResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateNamespaceResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse.Builder.class); } public static com.google.protobuf.Parser<CreateNamespaceResponse> PARSER = new com.google.protobuf.AbstractParser<CreateNamespaceResponse>() { public CreateNamespaceResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new CreateNamespaceResponse(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<CreateNamespaceResponse> getParserForType() { return PARSER; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object 
writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return 
newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code CreateNamespaceResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateNamespaceResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateNamespaceResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateNamespaceResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse.getDefaultInstance()) return this; 
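// Non-default source: an empty message like CreateNamespaceResponse can only
// differ in its unknown-field set, so merging reduces to copying that across.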
this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } // @@protoc_insertion_point(builder_scope:CreateNamespaceResponse) } static { defaultInstance = new CreateNamespaceResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:CreateNamespaceResponse) } public interface DeleteNamespaceRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required string namespaceName = 1; /** * <code>required string namespaceName = 1;</code> */ boolean hasNamespaceName(); /** * <code>required string namespaceName = 1;</code> */ java.lang.String getNamespaceName(); /** * <code>required string namespaceName = 1;</code> */ com.google.protobuf.ByteString getNamespaceNameBytes(); } /** * Protobuf type {@code DeleteNamespaceRequest} */ public static final class DeleteNamespaceRequest extends com.google.protobuf.GeneratedMessage implements DeleteNamespaceRequestOrBuilder { // Use DeleteNamespaceRequest.newBuilder() to construct. private DeleteNamespaceRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private DeleteNamespaceRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final DeleteNamespaceRequest defaultInstance; public static DeleteNamespaceRequest getDefaultInstance() { return defaultInstance; } public DeleteNamespaceRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private DeleteNamespaceRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; namespaceName_ = input.readBytes(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteNamespaceRequest_descriptor; } protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteNamespaceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest.Builder.class); } public static com.google.protobuf.Parser<DeleteNamespaceRequest> PARSER = new com.google.protobuf.AbstractParser<DeleteNamespaceRequest>() { public DeleteNamespaceRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new DeleteNamespaceRequest(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<DeleteNamespaceRequest> getParserForType() { return PARSER; } private int bitField0_; // required string namespaceName = 1; public static final int NAMESPACENAME_FIELD_NUMBER = 1; private java.lang.Object namespaceName_; /** * <code>required string namespaceName = 1;</code> */ public boolean hasNamespaceName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string namespaceName = 1;</code> */ public java.lang.String getNamespaceName() { java.lang.Object ref = namespaceName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { namespaceName_ = s; } return s; } } /** * <code>required string namespaceName = 1;</code> */ public com.google.protobuf.ByteString getNamespaceNameBytes() { java.lang.Object ref = namespaceName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); namespaceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private void initFields() { namespaceName_ = ""; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasNamespaceName()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, getNamespaceNameBytes()); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, getNamespaceNameBytes()); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest other = 
(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest) obj; boolean result = true; result = result && (hasNamespaceName() == other.hasNamespaceName()); if (hasNamespaceName()) { result = result && getNamespaceName() .equals(other.getNamespaceName()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasNamespaceName()) { hash = (37 * hash) + NAMESPACENAME_FIELD_NUMBER; hash = (53 * hash) + getNamespaceName().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest 
prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code DeleteNamespaceRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteNamespaceRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteNamespaceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); namespaceName_ = ""; bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteNamespaceRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.namespaceName_ = namespaceName_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest other) { if (other == 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest.getDefaultInstance()) return this; if (other.hasNamespaceName()) { bitField0_ |= 0x00000001; namespaceName_ = other.namespaceName_; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasNamespaceName()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required string namespaceName = 1; private java.lang.Object namespaceName_ = ""; /** * <code>required string namespaceName = 1;</code> */ public boolean hasNamespaceName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string namespaceName = 1;</code> */ public java.lang.String getNamespaceName() { java.lang.Object ref = namespaceName_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); namespaceName_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>required string namespaceName = 1;</code> */ public com.google.protobuf.ByteString getNamespaceNameBytes() { java.lang.Object ref = namespaceName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); namespaceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>required string namespaceName = 1;</code> */ public Builder setNamespaceName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; namespaceName_ = value; onChanged(); return this; } /** * <code>required string namespaceName = 1;</code> */ public Builder clearNamespaceName() { bitField0_ = (bitField0_ & ~0x00000001); namespaceName_ = getDefaultInstance().getNamespaceName(); onChanged(); return this; } /** * <code>required string namespaceName = 1;</code> */ public Builder setNamespaceNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; namespaceName_ = value; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:DeleteNamespaceRequest) } static { defaultInstance = new DeleteNamespaceRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:DeleteNamespaceRequest) } public interface DeleteNamespaceResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code DeleteNamespaceResponse} */ public static final class DeleteNamespaceResponse extends com.google.protobuf.GeneratedMessage implements DeleteNamespaceResponseOrBuilder { // Use DeleteNamespaceResponse.newBuilder() to construct. 
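// Illustrative sketch, not protoc output: exercising the DeleteNamespaceRequest
// type defined above; "test_ns" is a made-up namespace name.
//
//   DeleteNamespaceRequest req = DeleteNamespaceRequest.newBuilder()
//       .setNamespaceName("test_ns")  // the only field, and it is required
//       .build();                     // would fail fast if namespaceName were unset
//   byte[] wire = req.toByteArray();
//   DeleteNamespaceRequest back = DeleteNamespaceRequest.parseFrom(wire);
//   assert back.getNamespaceName().equals("test_ns");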
private DeleteNamespaceResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private DeleteNamespaceResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final DeleteNamespaceResponse defaultInstance; public static DeleteNamespaceResponse getDefaultInstance() { return defaultInstance; } public DeleteNamespaceResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private DeleteNamespaceResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteNamespaceResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteNamespaceResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse.Builder.class); } public static com.google.protobuf.Parser<DeleteNamespaceResponse> PARSER = new com.google.protobuf.AbstractParser<DeleteNamespaceResponse>() { public DeleteNamespaceResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new DeleteNamespaceResponse(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<DeleteNamespaceResponse> getParserForType() { return PARSER; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws 
java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static 
Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code DeleteNamespaceResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteNamespaceResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteNamespaceResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteNamespaceResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public 
final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } // @@protoc_insertion_point(builder_scope:DeleteNamespaceResponse) } static { defaultInstance = new DeleteNamespaceResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:DeleteNamespaceResponse) } public interface ModifyNamespaceRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .NamespaceDescriptor namespaceDescriptor = 1; /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ boolean hasNamespaceDescriptor(); /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor getNamespaceDescriptor(); /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder getNamespaceDescriptorOrBuilder(); } /** * Protobuf type {@code ModifyNamespaceRequest} */ public static final class ModifyNamespaceRequest extends com.google.protobuf.GeneratedMessage implements ModifyNamespaceRequestOrBuilder { // Use ModifyNamespaceRequest.newBuilder() to construct. 
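// Illustrative usage sketch (hand-written; not part of the protoc output):
// populating the required nested NamespaceDescriptor on a
// ModifyNamespaceRequest. The descriptor is left as a placeholder here; in
// practice it might come from a prior GetNamespaceDescriptorResponse.
//
//   HBaseProtos.NamespaceDescriptor descriptor = ...;  // placeholder
//   ModifyNamespaceRequest request = ModifyNamespaceRequest.newBuilder()
//       .setNamespaceDescriptor(descriptor)  // sets bit 0x00000001 in bitField0_
//       .build();                            // throws if the descriptor is unset
//
// Note the builder offers both setters: mergeNamespaceDescriptor(value)
// combines value field-by-field with any descriptor already present, whereas
// setNamespaceDescriptor(value) replaces it outright.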
private ModifyNamespaceRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private ModifyNamespaceRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final ModifyNamespaceRequest defaultInstance; public static ModifyNamespaceRequest getDefaultInstance() { return defaultInstance; } public ModifyNamespaceRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ModifyNamespaceRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = namespaceDescriptor_.toBuilder(); } namespaceDescriptor_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(namespaceDescriptor_); namespaceDescriptor_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyNamespaceRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyNamespaceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest.Builder.class); } public static com.google.protobuf.Parser<ModifyNamespaceRequest> PARSER = new com.google.protobuf.AbstractParser<ModifyNamespaceRequest>() { public ModifyNamespaceRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ModifyNamespaceRequest(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<ModifyNamespaceRequest> getParserForType() { return PARSER; } private int bitField0_; // required .NamespaceDescriptor namespaceDescriptor = 1; public static final int NAMESPACEDESCRIPTOR_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor namespaceDescriptor_; /** * 
<code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public boolean hasNamespaceDescriptor() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor getNamespaceDescriptor() { return namespaceDescriptor_; } /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder getNamespaceDescriptorOrBuilder() { return namespaceDescriptor_; } private void initFields() { namespaceDescriptor_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasNamespaceDescriptor()) { memoizedIsInitialized = 0; return false; } if (!getNamespaceDescriptor().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, namespaceDescriptor_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, namespaceDescriptor_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest) obj; boolean result = true; result = result && (hasNamespaceDescriptor() == other.hasNamespaceDescriptor()); if (hasNamespaceDescriptor()) { result = result && getNamespaceDescriptor() .equals(other.getNamespaceDescriptor()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasNamespaceDescriptor()) { hash = (37 * hash) + NAMESPACEDESCRIPTOR_FIELD_NUMBER; hash = (53 * hash) + getNamespaceDescriptor().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code ModifyNamespaceRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyNamespaceRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyNamespaceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest.Builder.class); } // Construct using 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getNamespaceDescriptorFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (namespaceDescriptorBuilder_ == null) { namespaceDescriptor_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); } else { namespaceDescriptorBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyNamespaceRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (namespaceDescriptorBuilder_ == null) { result.namespaceDescriptor_ = namespaceDescriptor_; } else { result.namespaceDescriptor_ = namespaceDescriptorBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest.getDefaultInstance()) return this; if (other.hasNamespaceDescriptor()) { mergeNamespaceDescriptor(other.getNamespaceDescriptor()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasNamespaceDescriptor()) { return false; } if (!getNamespaceDescriptor().isInitialized()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = 
(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required .NamespaceDescriptor namespaceDescriptor = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor namespaceDescriptor_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder> namespaceDescriptorBuilder_; /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public boolean hasNamespaceDescriptor() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor getNamespaceDescriptor() { if (namespaceDescriptorBuilder_ == null) { return namespaceDescriptor_; } else { return namespaceDescriptorBuilder_.getMessage(); } } /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public Builder setNamespaceDescriptor(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor value) { if (namespaceDescriptorBuilder_ == null) { if (value == null) { throw new NullPointerException(); } namespaceDescriptor_ = value; onChanged(); } else { namespaceDescriptorBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public Builder setNamespaceDescriptor( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder builderForValue) { if (namespaceDescriptorBuilder_ == null) { namespaceDescriptor_ = builderForValue.build(); onChanged(); } else { namespaceDescriptorBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public Builder mergeNamespaceDescriptor(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor value) { if (namespaceDescriptorBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && namespaceDescriptor_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance()) { namespaceDescriptor_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.newBuilder(namespaceDescriptor_).mergeFrom(value).buildPartial(); } else { namespaceDescriptor_ = value; } onChanged(); } else { namespaceDescriptorBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public Builder clearNamespaceDescriptor() { if (namespaceDescriptorBuilder_ == null) { namespaceDescriptor_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); onChanged(); } else { namespaceDescriptorBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder getNamespaceDescriptorBuilder() { bitField0_ |= 0x00000001; onChanged(); 
return getNamespaceDescriptorFieldBuilder().getBuilder(); } /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder getNamespaceDescriptorOrBuilder() { if (namespaceDescriptorBuilder_ != null) { return namespaceDescriptorBuilder_.getMessageOrBuilder(); } else { return namespaceDescriptor_; } } /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder> getNamespaceDescriptorFieldBuilder() { if (namespaceDescriptorBuilder_ == null) { namespaceDescriptorBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder>( namespaceDescriptor_, getParentForChildren(), isClean()); namespaceDescriptor_ = null; } return namespaceDescriptorBuilder_; } // @@protoc_insertion_point(builder_scope:ModifyNamespaceRequest) } static { defaultInstance = new ModifyNamespaceRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:ModifyNamespaceRequest) } public interface ModifyNamespaceResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code ModifyNamespaceResponse} */ public static final class ModifyNamespaceResponse extends com.google.protobuf.GeneratedMessage implements ModifyNamespaceResponseOrBuilder { // Use ModifyNamespaceResponse.newBuilder() to construct. 
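// Illustrative usage sketch (hand-written; not part of the protoc output):
// ModifyNamespaceResponse declares no fields, but parsing still preserves any
// unknown fields a newer peer may have written, so a round trip is lossless.
// "wire" is a placeholder for bytes received from the RPC layer.
//
//   byte[] wire = ...;  // placeholder
//   ModifyNamespaceResponse response = ModifyNamespaceResponse.parseFrom(wire);
//   response.getUnknownFields();   // fields from a newer schema survive here
//   byte[] echoed = response.toByteArray();  // re-serializes them as well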
private ModifyNamespaceResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private ModifyNamespaceResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final ModifyNamespaceResponse defaultInstance; public static ModifyNamespaceResponse getDefaultInstance() { return defaultInstance; } public ModifyNamespaceResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ModifyNamespaceResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyNamespaceResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyNamespaceResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse.Builder.class); } public static com.google.protobuf.Parser<ModifyNamespaceResponse> PARSER = new com.google.protobuf.AbstractParser<ModifyNamespaceResponse>() { public ModifyNamespaceResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ModifyNamespaceResponse(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<ModifyNamespaceResponse> getParserForType() { return PARSER; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws 
java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static 
Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code ModifyNamespaceResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyNamespaceResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyNamespaceResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyNamespaceResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public 
final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } // @@protoc_insertion_point(builder_scope:ModifyNamespaceResponse) } static { defaultInstance = new ModifyNamespaceResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:ModifyNamespaceResponse) } public interface GetNamespaceDescriptorRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required string namespaceName = 1; /** * <code>required string namespaceName = 1;</code> */ boolean hasNamespaceName(); /** * <code>required string namespaceName = 1;</code> */ java.lang.String getNamespaceName(); /** * <code>required string namespaceName = 1;</code> */ com.google.protobuf.ByteString getNamespaceNameBytes(); } /** * Protobuf type {@code GetNamespaceDescriptorRequest} */ public static final class GetNamespaceDescriptorRequest extends com.google.protobuf.GeneratedMessage implements GetNamespaceDescriptorRequestOrBuilder { // Use GetNamespaceDescriptorRequest.newBuilder() to construct. private GetNamespaceDescriptorRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private GetNamespaceDescriptorRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final GetNamespaceDescriptorRequest defaultInstance; public static GetNamespaceDescriptorRequest getDefaultInstance() { return defaultInstance; } public GetNamespaceDescriptorRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetNamespaceDescriptorRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; namespaceName_ = input.readBytes(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetNamespaceDescriptorRequest_descriptor; } protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetNamespaceDescriptorRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest.Builder.class); } public static com.google.protobuf.Parser<GetNamespaceDescriptorRequest> PARSER = new com.google.protobuf.AbstractParser<GetNamespaceDescriptorRequest>() { public GetNamespaceDescriptorRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new GetNamespaceDescriptorRequest(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<GetNamespaceDescriptorRequest> getParserForType() { return PARSER; } private int bitField0_; // required string namespaceName = 1; public static final int NAMESPACENAME_FIELD_NUMBER = 1; private java.lang.Object namespaceName_; /** * <code>required string namespaceName = 1;</code> */ public boolean hasNamespaceName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string namespaceName = 1;</code> */ public java.lang.String getNamespaceName() { java.lang.Object ref = namespaceName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { namespaceName_ = s; } return s; } } /** * <code>required string namespaceName = 1;</code> */ public com.google.protobuf.ByteString getNamespaceNameBytes() { java.lang.Object ref = namespaceName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); namespaceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private void initFields() { namespaceName_ = ""; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasNamespaceName()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, getNamespaceNameBytes()); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, getNamespaceNameBytes()); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest)) { return super.equals(obj); } 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest) obj; boolean result = true; result = result && (hasNamespaceName() == other.hasNamespaceName()); if (hasNamespaceName()) { result = result && getNamespaceName() .equals(other.getNamespaceName()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasNamespaceName()) { hash = (37 * hash) + NAMESPACENAME_FIELD_NUMBER; hash = (53 * hash) + getNamespaceName().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public 
Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code GetNamespaceDescriptorRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetNamespaceDescriptorRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetNamespaceDescriptorRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); namespaceName_ = ""; bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetNamespaceDescriptorRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.namespaceName_ = namespaceName_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest) { return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest.getDefaultInstance()) return this; if (other.hasNamespaceName()) { bitField0_ |= 0x00000001; namespaceName_ = other.namespaceName_; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasNamespaceName()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required string namespaceName = 1; private java.lang.Object namespaceName_ = ""; /** * <code>required string namespaceName = 1;</code> */ public boolean hasNamespaceName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string namespaceName = 1;</code> */ public java.lang.String getNamespaceName() { java.lang.Object ref = namespaceName_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); namespaceName_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>required string namespaceName = 1;</code> */ public com.google.protobuf.ByteString getNamespaceNameBytes() { java.lang.Object ref = namespaceName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); namespaceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>required string namespaceName = 1;</code> */ public Builder setNamespaceName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; namespaceName_ = value; onChanged(); return this; } /** * <code>required string namespaceName = 1;</code> */ public Builder clearNamespaceName() { bitField0_ = (bitField0_ & ~0x00000001); namespaceName_ = getDefaultInstance().getNamespaceName(); onChanged(); return this; } /** * <code>required string namespaceName = 1;</code> */ public Builder setNamespaceNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; namespaceName_ = value; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:GetNamespaceDescriptorRequest) } static { defaultInstance = new GetNamespaceDescriptorRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:GetNamespaceDescriptorRequest) } public interface GetNamespaceDescriptorResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .NamespaceDescriptor namespaceDescriptor = 1; /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ boolean hasNamespaceDescriptor(); /** * 
<code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor getNamespaceDescriptor(); /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder getNamespaceDescriptorOrBuilder(); } /** * Protobuf type {@code GetNamespaceDescriptorResponse} */ public static final class GetNamespaceDescriptorResponse extends com.google.protobuf.GeneratedMessage implements GetNamespaceDescriptorResponseOrBuilder { // Use GetNamespaceDescriptorResponse.newBuilder() to construct. private GetNamespaceDescriptorResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private GetNamespaceDescriptorResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final GetNamespaceDescriptorResponse defaultInstance; public static GetNamespaceDescriptorResponse getDefaultInstance() { return defaultInstance; } public GetNamespaceDescriptorResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetNamespaceDescriptorResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = namespaceDescriptor_.toBuilder(); } namespaceDescriptor_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(namespaceDescriptor_); namespaceDescriptor_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetNamespaceDescriptorResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetNamespaceDescriptorResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.Builder.class); } public static 
com.google.protobuf.Parser<GetNamespaceDescriptorResponse> PARSER = new com.google.protobuf.AbstractParser<GetNamespaceDescriptorResponse>() { public GetNamespaceDescriptorResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new GetNamespaceDescriptorResponse(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<GetNamespaceDescriptorResponse> getParserForType() { return PARSER; } private int bitField0_; // required .NamespaceDescriptor namespaceDescriptor = 1; public static final int NAMESPACEDESCRIPTOR_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor namespaceDescriptor_; /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public boolean hasNamespaceDescriptor() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor getNamespaceDescriptor() { return namespaceDescriptor_; } /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder getNamespaceDescriptorOrBuilder() { return namespaceDescriptor_; } private void initFields() { namespaceDescriptor_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasNamespaceDescriptor()) { memoizedIsInitialized = 0; return false; } if (!getNamespaceDescriptor().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, namespaceDescriptor_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, namespaceDescriptor_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse) obj; boolean result = true; result = result && (hasNamespaceDescriptor() == other.hasNamespaceDescriptor()); if (hasNamespaceDescriptor()) { result = result && getNamespaceDescriptor() .equals(other.getNamespaceDescriptor()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int 
memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasNamespaceDescriptor()) { hash = (37 * hash) + NAMESPACEDESCRIPTOR_FIELD_NUMBER; hash = (53 * hash) + getNamespaceDescriptor().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return 
builder; } /** * Protobuf type {@code GetNamespaceDescriptorResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetNamespaceDescriptorResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetNamespaceDescriptorResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getNamespaceDescriptorFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (namespaceDescriptorBuilder_ == null) { namespaceDescriptor_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); } else { namespaceDescriptorBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetNamespaceDescriptorResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (namespaceDescriptorBuilder_ == null) { result.namespaceDescriptor_ = namespaceDescriptor_; } else { result.namespaceDescriptor_ = namespaceDescriptorBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse) { return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.getDefaultInstance()) return this; if (other.hasNamespaceDescriptor()) { mergeNamespaceDescriptor(other.getNamespaceDescriptor()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasNamespaceDescriptor()) { return false; } if (!getNamespaceDescriptor().isInitialized()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required .NamespaceDescriptor namespaceDescriptor = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor namespaceDescriptor_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder> namespaceDescriptorBuilder_; /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public boolean hasNamespaceDescriptor() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor getNamespaceDescriptor() { if (namespaceDescriptorBuilder_ == null) { return namespaceDescriptor_; } else { return namespaceDescriptorBuilder_.getMessage(); } } /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public Builder setNamespaceDescriptor(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor value) { if (namespaceDescriptorBuilder_ == null) { if (value == null) { throw new NullPointerException(); } namespaceDescriptor_ = value; onChanged(); } else { namespaceDescriptorBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public Builder setNamespaceDescriptor( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder builderForValue) { if (namespaceDescriptorBuilder_ == null) { namespaceDescriptor_ = builderForValue.build(); onChanged(); } else { namespaceDescriptorBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public Builder mergeNamespaceDescriptor(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor value) 
{ if (namespaceDescriptorBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && namespaceDescriptor_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance()) { namespaceDescriptor_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.newBuilder(namespaceDescriptor_).mergeFrom(value).buildPartial(); } else { namespaceDescriptor_ = value; } onChanged(); } else { namespaceDescriptorBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public Builder clearNamespaceDescriptor() { if (namespaceDescriptorBuilder_ == null) { namespaceDescriptor_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance(); onChanged(); } else { namespaceDescriptorBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder getNamespaceDescriptorBuilder() { bitField0_ |= 0x00000001; onChanged(); return getNamespaceDescriptorFieldBuilder().getBuilder(); } /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder getNamespaceDescriptorOrBuilder() { if (namespaceDescriptorBuilder_ != null) { return namespaceDescriptorBuilder_.getMessageOrBuilder(); } else { return namespaceDescriptor_; } } /** * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder> getNamespaceDescriptorFieldBuilder() { if (namespaceDescriptorBuilder_ == null) { namespaceDescriptorBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder>( namespaceDescriptor_, getParentForChildren(), isClean()); namespaceDescriptor_ = null; } return namespaceDescriptorBuilder_; } // @@protoc_insertion_point(builder_scope:GetNamespaceDescriptorResponse) } static { defaultInstance = new GetNamespaceDescriptorResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:GetNamespaceDescriptorResponse) } public interface ListNamespaceDescriptorsRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code ListNamespaceDescriptorsRequest} */ public static final class ListNamespaceDescriptorsRequest extends com.google.protobuf.GeneratedMessage implements ListNamespaceDescriptorsRequestOrBuilder { // Use ListNamespaceDescriptorsRequest.newBuilder() to construct. 
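// Editorial sketch, not compiler output: this request message declares no
// fields, so an empty instance is all a caller ever builds. The blocking
// stub and RPC controller below are assumptions (obtained elsewhere, e.g.
// via the generated MasterService.BlockingInterface); only newBuilder(),
// build(), and getDefaultInstance() come from this class.
//
//   ListNamespaceDescriptorsRequest request =
//       ListNamespaceDescriptorsRequest.newBuilder().build();
//   // equivalently: ListNamespaceDescriptorsRequest.getDefaultInstance()
//   ListNamespaceDescriptorsResponse response =
//       masterStub.listNamespaceDescriptors(controller, request);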
private ListNamespaceDescriptorsRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private ListNamespaceDescriptorsRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final ListNamespaceDescriptorsRequest defaultInstance; public static ListNamespaceDescriptorsRequest getDefaultInstance() { return defaultInstance; } public ListNamespaceDescriptorsRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ListNamespaceDescriptorsRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListNamespaceDescriptorsRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListNamespaceDescriptorsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest.Builder.class); } public static com.google.protobuf.Parser<ListNamespaceDescriptorsRequest> PARSER = new com.google.protobuf.AbstractParser<ListNamespaceDescriptorsRequest>() { public ListNamespaceDescriptorsRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ListNamespaceDescriptorsRequest(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<ListNamespaceDescriptorsRequest> getParserForType() { return PARSER; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long 
serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code ListNamespaceDescriptorsRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListNamespaceDescriptorsRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListNamespaceDescriptorsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListNamespaceDescriptorsRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest)other); } else { 
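// Different message type: defer to the reflective, descriptor-driven
// merge inherited from AbstractMessage.Builder.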
super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } // @@protoc_insertion_point(builder_scope:ListNamespaceDescriptorsRequest) } static { defaultInstance = new ListNamespaceDescriptorsRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:ListNamespaceDescriptorsRequest) } public interface ListNamespaceDescriptorsResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { // repeated .NamespaceDescriptor namespaceDescriptor = 1; /** * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code> */ java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor> getNamespaceDescriptorList(); /** * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor getNamespaceDescriptor(int index); /** * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code> */ int getNamespaceDescriptorCount(); /** * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code> */ java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder> getNamespaceDescriptorOrBuilderList(); /** * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder getNamespaceDescriptorOrBuilder( int index); } /** * Protobuf type {@code ListNamespaceDescriptorsResponse} */ public static final class ListNamespaceDescriptorsResponse extends com.google.protobuf.GeneratedMessage implements ListNamespaceDescriptorsResponseOrBuilder { // Use ListNamespaceDescriptorsResponse.newBuilder() to construct. 
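// Editorial sketch, not compiler output: typical read-side use of this
// response. "bytes" is an assumed byte[] holding a serialized
// ListNamespaceDescriptorsResponse; parseFrom(byte[]) and
// getNamespaceDescriptorList() are the generated members defined below.
//
//   ListNamespaceDescriptorsResponse resp =
//       ListNamespaceDescriptorsResponse.parseFrom(bytes);
//   for (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor ns
//       : resp.getNamespaceDescriptorList()) {
//     System.out.println(ns);  // Message.toString() renders protobuf text format
//   }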
private ListNamespaceDescriptorsResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private ListNamespaceDescriptorsResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final ListNamespaceDescriptorsResponse defaultInstance; public static ListNamespaceDescriptorsResponse getDefaultInstance() { return defaultInstance; } public ListNamespaceDescriptorsResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ListNamespaceDescriptorsResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { namespaceDescriptor_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor>(); mutable_bitField0_ |= 0x00000001; } namespaceDescriptor_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.PARSER, extensionRegistry)); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { namespaceDescriptor_ = java.util.Collections.unmodifiableList(namespaceDescriptor_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListNamespaceDescriptorsResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListNamespaceDescriptorsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.Builder.class); } public static com.google.protobuf.Parser<ListNamespaceDescriptorsResponse> PARSER = new com.google.protobuf.AbstractParser<ListNamespaceDescriptorsResponse>() { public ListNamespaceDescriptorsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ListNamespaceDescriptorsResponse(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<ListNamespaceDescriptorsResponse> getParserForType() { return PARSER; } // repeated .NamespaceDescriptor namespaceDescriptor = 1; public static final int 
NAMESPACEDESCRIPTOR_FIELD_NUMBER = 1; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor> namespaceDescriptor_; /** * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor> getNamespaceDescriptorList() { return namespaceDescriptor_; } /** * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder> getNamespaceDescriptorOrBuilderList() { return namespaceDescriptor_; } /** * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public int getNamespaceDescriptorCount() { return namespaceDescriptor_.size(); } /** * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor getNamespaceDescriptor(int index) { return namespaceDescriptor_.get(index); } /** * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder getNamespaceDescriptorOrBuilder( int index) { return namespaceDescriptor_.get(index); } private void initFields() { namespaceDescriptor_ = java.util.Collections.emptyList(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; for (int i = 0; i < getNamespaceDescriptorCount(); i++) { if (!getNamespaceDescriptor(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); for (int i = 0; i < namespaceDescriptor_.size(); i++) { output.writeMessage(1, namespaceDescriptor_.get(i)); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; for (int i = 0; i < namespaceDescriptor_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, namespaceDescriptor_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse) obj; boolean result = true; result = result && getNamespaceDescriptorList() .equals(other.getNamespaceDescriptorList()); result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (getNamespaceDescriptorCount() > 0) { hash = (37 * hash) + 
NAMESPACEDESCRIPTOR_FIELD_NUMBER; hash = (53 * hash) + getNamespaceDescriptorList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code ListNamespaceDescriptorsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListNamespaceDescriptorsResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListNamespaceDescriptorsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getNamespaceDescriptorFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (namespaceDescriptorBuilder_ == null) { namespaceDescriptor_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { namespaceDescriptorBuilder_.clear(); } return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListNamespaceDescriptorsResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse(this); int from_bitField0_ = bitField0_; if (namespaceDescriptorBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001)) { namespaceDescriptor_ = java.util.Collections.unmodifiableList(namespaceDescriptor_); bitField0_ = (bitField0_ & ~0x00000001); } result.namespaceDescriptor_ = namespaceDescriptor_; } else { result.namespaceDescriptor_ = namespaceDescriptorBuilder_.build(); } onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse)other); } else { super.mergeFrom(other); return this; } } public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.getDefaultInstance()) return this; if (namespaceDescriptorBuilder_ == null) { if (!other.namespaceDescriptor_.isEmpty()) { if (namespaceDescriptor_.isEmpty()) { namespaceDescriptor_ = other.namespaceDescriptor_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureNamespaceDescriptorIsMutable(); namespaceDescriptor_.addAll(other.namespaceDescriptor_); } onChanged(); } } else { if (!other.namespaceDescriptor_.isEmpty()) { if (namespaceDescriptorBuilder_.isEmpty()) { namespaceDescriptorBuilder_.dispose(); namespaceDescriptorBuilder_ = null; namespaceDescriptor_ = other.namespaceDescriptor_; bitField0_ = (bitField0_ & ~0x00000001); namespaceDescriptorBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? getNamespaceDescriptorFieldBuilder() : null; } else { namespaceDescriptorBuilder_.addAllMessages(other.namespaceDescriptor_); } } } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { for (int i = 0; i < getNamespaceDescriptorCount(); i++) { if (!getNamespaceDescriptor(i).isInitialized()) { return false; } } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // repeated .NamespaceDescriptor namespaceDescriptor = 1; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor> namespaceDescriptor_ = java.util.Collections.emptyList(); private void ensureNamespaceDescriptorIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { namespaceDescriptor_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor>(namespaceDescriptor_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder> namespaceDescriptorBuilder_; /** * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor> getNamespaceDescriptorList() { if (namespaceDescriptorBuilder_ == null) { return java.util.Collections.unmodifiableList(namespaceDescriptor_); } else { return namespaceDescriptorBuilder_.getMessageList(); } } /** * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public int getNamespaceDescriptorCount() { if (namespaceDescriptorBuilder_ == null) { return namespaceDescriptor_.size(); } else { return namespaceDescriptorBuilder_.getCount(); } } /** * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor getNamespaceDescriptor(int index) { if (namespaceDescriptorBuilder_ == null) { return namespaceDescriptor_.get(index); } else { return namespaceDescriptorBuilder_.getMessage(index); } } /** * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public Builder setNamespaceDescriptor( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor value) { if (namespaceDescriptorBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNamespaceDescriptorIsMutable(); namespaceDescriptor_.set(index, value); onChanged(); } else { namespaceDescriptorBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public Builder setNamespaceDescriptor( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder builderForValue) { if (namespaceDescriptorBuilder_ == null) { ensureNamespaceDescriptorIsMutable(); namespaceDescriptor_.set(index, builderForValue.build()); onChanged(); } else { namespaceDescriptorBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public Builder addNamespaceDescriptor(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor value) { if (namespaceDescriptorBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNamespaceDescriptorIsMutable(); namespaceDescriptor_.add(value); onChanged(); } else { namespaceDescriptorBuilder_.addMessage(value); } return this; } /** * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public Builder addNamespaceDescriptor( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor value) { if (namespaceDescriptorBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNamespaceDescriptorIsMutable(); namespaceDescriptor_.add(index, value); onChanged(); } else { namespaceDescriptorBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public Builder addNamespaceDescriptor( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder builderForValue) { if (namespaceDescriptorBuilder_ == null) { ensureNamespaceDescriptorIsMutable(); namespaceDescriptor_.add(builderForValue.build()); onChanged(); } else { namespaceDescriptorBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public Builder addNamespaceDescriptor( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder builderForValue) { if (namespaceDescriptorBuilder_ == null) { ensureNamespaceDescriptorIsMutable(); namespaceDescriptor_.add(index, builderForValue.build()); onChanged(); } else { namespaceDescriptorBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public Builder addAllNamespaceDescriptor( java.lang.Iterable<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor> values) { if (namespaceDescriptorBuilder_ == null) { ensureNamespaceDescriptorIsMutable(); super.addAll(values, namespaceDescriptor_); onChanged(); } else { namespaceDescriptorBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public Builder clearNamespaceDescriptor() { if (namespaceDescriptorBuilder_ == null) { namespaceDescriptor_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { namespaceDescriptorBuilder_.clear(); } return this; } /** * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public Builder removeNamespaceDescriptor(int index) { if (namespaceDescriptorBuilder_ == null) { ensureNamespaceDescriptorIsMutable(); namespaceDescriptor_.remove(index); onChanged(); } else { namespaceDescriptorBuilder_.remove(index); } return this; } /** * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder getNamespaceDescriptorBuilder( int index) { return getNamespaceDescriptorFieldBuilder().getBuilder(index); } /** * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder getNamespaceDescriptorOrBuilder( int index) { if (namespaceDescriptorBuilder_ == null) { return namespaceDescriptor_.get(index); } else { return namespaceDescriptorBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder> getNamespaceDescriptorOrBuilderList() { if (namespaceDescriptorBuilder_ != null) { return namespaceDescriptorBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(namespaceDescriptor_); } } /** * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder addNamespaceDescriptorBuilder() { return getNamespaceDescriptorFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance()); } /** * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder addNamespaceDescriptorBuilder( int index) { return getNamespaceDescriptorFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance()); } /** * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder> getNamespaceDescriptorBuilderList() { return getNamespaceDescriptorFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder> getNamespaceDescriptorFieldBuilder() { if (namespaceDescriptorBuilder_ == null) { namespaceDescriptorBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder>( namespaceDescriptor_, ((bitField0_ & 0x00000001) == 0x00000001), getParentForChildren(), isClean()); namespaceDescriptor_ = null; } return namespaceDescriptorBuilder_; } // @@protoc_insertion_point(builder_scope:ListNamespaceDescriptorsResponse) } static { defaultInstance = new ListNamespaceDescriptorsResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:ListNamespaceDescriptorsResponse) } public interface ListTableDescriptorsByNamespaceRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required string namespaceName = 1; /** * <code>required string namespaceName = 1;</code> */ boolean hasNamespaceName(); /** * <code>required string namespaceName = 1;</code> */ java.lang.String getNamespaceName(); /** * <code>required string namespaceName = 1;</code> */ com.google.protobuf.ByteString getNamespaceNameBytes(); } /** * Protobuf type {@code ListTableDescriptorsByNamespaceRequest} */ public static final class ListTableDescriptorsByNamespaceRequest extends com.google.protobuf.GeneratedMessage implements ListTableDescriptorsByNamespaceRequestOrBuilder { // Use ListTableDescriptorsByNamespaceRequest.newBuilder() to construct. private ListTableDescriptorsByNamespaceRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private ListTableDescriptorsByNamespaceRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final ListTableDescriptorsByNamespaceRequest defaultInstance; public static ListTableDescriptorsByNamespaceRequest getDefaultInstance() { return defaultInstance; } public ListTableDescriptorsByNamespaceRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ListTableDescriptorsByNamespaceRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; namespaceName_ = input.readBytes(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListTableDescriptorsByNamespaceRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListTableDescriptorsByNamespaceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest.Builder.class); } public static com.google.protobuf.Parser<ListTableDescriptorsByNamespaceRequest> PARSER = new com.google.protobuf.AbstractParser<ListTableDescriptorsByNamespaceRequest>() { public ListTableDescriptorsByNamespaceRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ListTableDescriptorsByNamespaceRequest(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<ListTableDescriptorsByNamespaceRequest> getParserForType() { return PARSER; } private int bitField0_; // required string namespaceName = 1; public static final int NAMESPACENAME_FIELD_NUMBER = 1; private java.lang.Object namespaceName_; /** * <code>required string namespaceName = 1;</code> */ public boolean hasNamespaceName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string namespaceName = 1;</code> */ public java.lang.String getNamespaceName() { java.lang.Object ref = namespaceName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { namespaceName_ = s; } return s; } } /** * <code>required string namespaceName = 1;</code> */ public com.google.protobuf.ByteString getNamespaceNameBytes() { java.lang.Object ref = namespaceName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); namespaceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private void initFields() { namespaceName_ = ""; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasNamespaceName()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, getNamespaceNameBytes()); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, getNamespaceNameBytes()); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest)) { return super.equals(obj); } 
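      // Value equality in the generated equals(): the presence bit of the
      // required namespaceName must match on both sides before the string
      // values themselves are compared; unknown fields are compared last.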
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest) obj; boolean result = true; result = result && (hasNamespaceName() == other.hasNamespaceName()); if (hasNamespaceName()) { result = result && getNamespaceName() .equals(other.getNamespaceName()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasNamespaceName()) { hash = (37 * hash) + NAMESPACENAME_FIELD_NUMBER; hash = (53 * hash) + getNamespaceName().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code ListTableDescriptorsByNamespaceRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListTableDescriptorsByNamespaceRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListTableDescriptorsByNamespaceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); namespaceName_ = ""; bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListTableDescriptorsByNamespaceRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.namespaceName_ = namespaceName_; 
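        // buildPartial() copies the builder's has-bits into the message so that
        // hasNamespaceName() on the result reflects what was set; initialization
        // of required fields is deliberately not checked here (build() does that).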
result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest.getDefaultInstance()) return this; if (other.hasNamespaceName()) { bitField0_ |= 0x00000001; namespaceName_ = other.namespaceName_; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasNamespaceName()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required string namespaceName = 1; private java.lang.Object namespaceName_ = ""; /** * <code>required string namespaceName = 1;</code> */ public boolean hasNamespaceName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string namespaceName = 1;</code> */ public java.lang.String getNamespaceName() { java.lang.Object ref = namespaceName_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); namespaceName_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>required string namespaceName = 1;</code> */ public com.google.protobuf.ByteString getNamespaceNameBytes() { java.lang.Object ref = namespaceName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); namespaceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>required string namespaceName = 1;</code> */ public Builder setNamespaceName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; namespaceName_ = value; onChanged(); return this; } /** * <code>required string namespaceName = 1;</code> */ public Builder clearNamespaceName() { bitField0_ = (bitField0_ & ~0x00000001); namespaceName_ = getDefaultInstance().getNamespaceName(); onChanged(); return this; } /** * <code>required string namespaceName = 1;</code> */ public Builder setNamespaceNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; namespaceName_ = value; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:ListTableDescriptorsByNamespaceRequest) } static { defaultInstance = new ListTableDescriptorsByNamespaceRequest(true); defaultInstance.initFields(); } // 
@@protoc_insertion_point(class_scope:ListTableDescriptorsByNamespaceRequest) } public interface ListTableDescriptorsByNamespaceResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { // repeated .TableSchema tableSchema = 1; /** * <code>repeated .TableSchema tableSchema = 1;</code> */ java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema> getTableSchemaList(); /** * <code>repeated .TableSchema tableSchema = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema(int index); /** * <code>repeated .TableSchema tableSchema = 1;</code> */ int getTableSchemaCount(); /** * <code>repeated .TableSchema tableSchema = 1;</code> */ java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> getTableSchemaOrBuilderList(); /** * <code>repeated .TableSchema tableSchema = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder( int index); } /** * Protobuf type {@code ListTableDescriptorsByNamespaceResponse} */ public static final class ListTableDescriptorsByNamespaceResponse extends com.google.protobuf.GeneratedMessage implements ListTableDescriptorsByNamespaceResponseOrBuilder { // Use ListTableDescriptorsByNamespaceResponse.newBuilder() to construct. private ListTableDescriptorsByNamespaceResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private ListTableDescriptorsByNamespaceResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final ListTableDescriptorsByNamespaceResponse defaultInstance; public static ListTableDescriptorsByNamespaceResponse getDefaultInstance() { return defaultInstance; } public ListTableDescriptorsByNamespaceResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ListTableDescriptorsByNamespaceResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { tableSchema_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema>(); mutable_bitField0_ |= 0x00000001; } tableSchema_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.PARSER, extensionRegistry)); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { tableSchema_ = java.util.Collections.unmodifiableList(tableSchema_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final 
com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListTableDescriptorsByNamespaceResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListTableDescriptorsByNamespaceResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.Builder.class); } public static com.google.protobuf.Parser<ListTableDescriptorsByNamespaceResponse> PARSER = new com.google.protobuf.AbstractParser<ListTableDescriptorsByNamespaceResponse>() { public ListTableDescriptorsByNamespaceResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ListTableDescriptorsByNamespaceResponse(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<ListTableDescriptorsByNamespaceResponse> getParserForType() { return PARSER; } // repeated .TableSchema tableSchema = 1; public static final int TABLESCHEMA_FIELD_NUMBER = 1; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema> tableSchema_; /** * <code>repeated .TableSchema tableSchema = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema> getTableSchemaList() { return tableSchema_; } /** * <code>repeated .TableSchema tableSchema = 1;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> getTableSchemaOrBuilderList() { return tableSchema_; } /** * <code>repeated .TableSchema tableSchema = 1;</code> */ public int getTableSchemaCount() { return tableSchema_.size(); } /** * <code>repeated .TableSchema tableSchema = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema(int index) { return tableSchema_.get(index); } /** * <code>repeated .TableSchema tableSchema = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder( int index) { return tableSchema_.get(index); } private void initFields() { tableSchema_ = java.util.Collections.emptyList(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; for (int i = 0; i < getTableSchemaCount(); i++) { if (!getTableSchema(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); for (int i = 0; i < tableSchema_.size(); i++) { output.writeMessage(1, tableSchema_.get(i)); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; for (int i = 0; i < tableSchema_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, tableSchema_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse) obj; boolean result = true; result = result && getTableSchemaList() .equals(other.getTableSchemaList()); result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (getTableSchemaCount() > 0) { hash = (37 * hash) + TABLESCHEMA_FIELD_NUMBER; hash = (53 * hash) + getTableSchemaList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code ListTableDescriptorsByNamespaceResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListTableDescriptorsByNamespaceResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListTableDescriptorsByNamespaceResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getTableSchemaFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (tableSchemaBuilder_ == null) { tableSchema_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { tableSchemaBuilder_.clear(); } return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListTableDescriptorsByNamespaceResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse(this); int from_bitField0_ = bitField0_; if (tableSchemaBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001)) { tableSchema_ = java.util.Collections.unmodifiableList(tableSchema_); bitField0_ = (bitField0_ & ~0x00000001); } result.tableSchema_ = tableSchema_; } else { result.tableSchema_ = tableSchemaBuilder_.build(); } onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.getDefaultInstance()) return this; if (tableSchemaBuilder_ == null) { if (!other.tableSchema_.isEmpty()) { if (tableSchema_.isEmpty()) { tableSchema_ = other.tableSchema_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureTableSchemaIsMutable(); tableSchema_.addAll(other.tableSchema_); } onChanged(); } } else { if (!other.tableSchema_.isEmpty()) { if (tableSchemaBuilder_.isEmpty()) { tableSchemaBuilder_.dispose(); 
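            // Merge optimization in the generated code: when this builder holds
            // no elements yet, drop its RepeatedFieldBuilder and adopt the other
            // message's immutable list by reference instead of copying it; the
            // field builder is lazily recreated below only when field builders
            // are forced on.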
tableSchemaBuilder_ = null; tableSchema_ = other.tableSchema_; bitField0_ = (bitField0_ & ~0x00000001); tableSchemaBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? getTableSchemaFieldBuilder() : null; } else { tableSchemaBuilder_.addAllMessages(other.tableSchema_); } } } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { for (int i = 0; i < getTableSchemaCount(); i++) { if (!getTableSchema(i).isInitialized()) { return false; } } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // repeated .TableSchema tableSchema = 1; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema> tableSchema_ = java.util.Collections.emptyList(); private void ensureTableSchemaIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { tableSchema_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema>(tableSchema_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> tableSchemaBuilder_; /** * <code>repeated .TableSchema tableSchema = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema> getTableSchemaList() { if (tableSchemaBuilder_ == null) { return java.util.Collections.unmodifiableList(tableSchema_); } else { return tableSchemaBuilder_.getMessageList(); } } /** * <code>repeated .TableSchema tableSchema = 1;</code> */ public int getTableSchemaCount() { if (tableSchemaBuilder_ == null) { return tableSchema_.size(); } else { return tableSchemaBuilder_.getCount(); } } /** * <code>repeated .TableSchema tableSchema = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema(int index) { if (tableSchemaBuilder_ == null) { return tableSchema_.get(index); } else { return tableSchemaBuilder_.getMessage(index); } } /** * <code>repeated .TableSchema tableSchema = 1;</code> */ public Builder setTableSchema( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) { if (tableSchemaBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTableSchemaIsMutable(); tableSchema_.set(index, value); onChanged(); } else { tableSchemaBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .TableSchema tableSchema = 1;</code> */ public Builder setTableSchema( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) { if (tableSchemaBuilder_ == null) { ensureTableSchemaIsMutable(); tableSchema_.set(index, builderForValue.build()); onChanged(); } else { tableSchemaBuilder_.setMessage(index, 
builderForValue.build()); } return this; } /** * <code>repeated .TableSchema tableSchema = 1;</code> */ public Builder addTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) { if (tableSchemaBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTableSchemaIsMutable(); tableSchema_.add(value); onChanged(); } else { tableSchemaBuilder_.addMessage(value); } return this; } /** * <code>repeated .TableSchema tableSchema = 1;</code> */ public Builder addTableSchema( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) { if (tableSchemaBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTableSchemaIsMutable(); tableSchema_.add(index, value); onChanged(); } else { tableSchemaBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .TableSchema tableSchema = 1;</code> */ public Builder addTableSchema( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) { if (tableSchemaBuilder_ == null) { ensureTableSchemaIsMutable(); tableSchema_.add(builderForValue.build()); onChanged(); } else { tableSchemaBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .TableSchema tableSchema = 1;</code> */ public Builder addTableSchema( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) { if (tableSchemaBuilder_ == null) { ensureTableSchemaIsMutable(); tableSchema_.add(index, builderForValue.build()); onChanged(); } else { tableSchemaBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .TableSchema tableSchema = 1;</code> */ public Builder addAllTableSchema( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema> values) { if (tableSchemaBuilder_ == null) { ensureTableSchemaIsMutable(); super.addAll(values, tableSchema_); onChanged(); } else { tableSchemaBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .TableSchema tableSchema = 1;</code> */ public Builder clearTableSchema() { if (tableSchemaBuilder_ == null) { tableSchema_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { tableSchemaBuilder_.clear(); } return this; } /** * <code>repeated .TableSchema tableSchema = 1;</code> */ public Builder removeTableSchema(int index) { if (tableSchemaBuilder_ == null) { ensureTableSchemaIsMutable(); tableSchema_.remove(index); onChanged(); } else { tableSchemaBuilder_.remove(index); } return this; } /** * <code>repeated .TableSchema tableSchema = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder getTableSchemaBuilder( int index) { return getTableSchemaFieldBuilder().getBuilder(index); } /** * <code>repeated .TableSchema tableSchema = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder( int index) { if (tableSchemaBuilder_ == null) { return tableSchema_.get(index); } else { return tableSchemaBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .TableSchema tableSchema = 1;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> getTableSchemaOrBuilderList() { if (tableSchemaBuilder_ != null) { return tableSchemaBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(tableSchema_); } } /** * <code>repeated .TableSchema tableSchema = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder addTableSchemaBuilder() { return getTableSchemaFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()); } /** * <code>repeated .TableSchema tableSchema = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder addTableSchemaBuilder( int index) { return getTableSchemaFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()); } /** * <code>repeated .TableSchema tableSchema = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder> getTableSchemaBuilderList() { return getTableSchemaFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> getTableSchemaFieldBuilder() { if (tableSchemaBuilder_ == null) { tableSchemaBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>( tableSchema_, ((bitField0_ & 0x00000001) == 0x00000001), getParentForChildren(), isClean()); tableSchema_ = null; } return tableSchemaBuilder_; } // @@protoc_insertion_point(builder_scope:ListTableDescriptorsByNamespaceResponse) } static { defaultInstance = new ListTableDescriptorsByNamespaceResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:ListTableDescriptorsByNamespaceResponse) } public interface ListTableNamesByNamespaceRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required string namespaceName = 1; /** * <code>required string namespaceName = 1;</code> */ boolean hasNamespaceName(); /** * <code>required string namespaceName = 1;</code> */ java.lang.String getNamespaceName(); /** * <code>required string namespaceName = 1;</code> */ com.google.protobuf.ByteString getNamespaceNameBytes(); } /** * Protobuf type {@code ListTableNamesByNamespaceRequest} */ public static final class ListTableNamesByNamespaceRequest extends com.google.protobuf.GeneratedMessage implements ListTableNamesByNamespaceRequestOrBuilder { // Use ListTableNamesByNamespaceRequest.newBuilder() to construct. 
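    // A minimal usage sketch for this request message, kept as a comment. It
    // assumes only the builder and parse methods generated in this class plus
    // the byte-array serialization inherited from GeneratedMessage; "default"
    // is an illustrative namespace name.
    //
    //   ListTableNamesByNamespaceRequest request =
    //       ListTableNamesByNamespaceRequest.newBuilder()
    //           .setNamespaceName("default") // required; build() throws if unset
    //           .build();
    //   byte[] wire = request.toByteArray();                  // serialize
    //   ListTableNamesByNamespaceRequest roundTrip =
    //       ListTableNamesByNamespaceRequest.parseFrom(wire); // parse back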
private ListTableNamesByNamespaceRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private ListTableNamesByNamespaceRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final ListTableNamesByNamespaceRequest defaultInstance; public static ListTableNamesByNamespaceRequest getDefaultInstance() { return defaultInstance; } public ListTableNamesByNamespaceRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ListTableNamesByNamespaceRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; namespaceName_ = input.readBytes(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListTableNamesByNamespaceRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListTableNamesByNamespaceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest.Builder.class); } public static com.google.protobuf.Parser<ListTableNamesByNamespaceRequest> PARSER = new com.google.protobuf.AbstractParser<ListTableNamesByNamespaceRequest>() { public ListTableNamesByNamespaceRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ListTableNamesByNamespaceRequest(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<ListTableNamesByNamespaceRequest> getParserForType() { return PARSER; } private int bitField0_; // required string namespaceName = 1; public static final int NAMESPACENAME_FIELD_NUMBER = 1; private java.lang.Object namespaceName_; /** * <code>required string namespaceName = 1;</code> */ public boolean hasNamespaceName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string namespaceName = 1;</code> */ public java.lang.String getNamespaceName() { java.lang.Object ref = namespaceName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { 
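    // Lazy decode: the parser stores the raw ByteString; the first call to
    // getNamespaceName() decodes it, and the decoded String is cached in place
    // of the ByteString only when the bytes are valid UTF-8.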
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { namespaceName_ = s; } return s; } } /** * <code>required string namespaceName = 1;</code> */ public com.google.protobuf.ByteString getNamespaceNameBytes() { java.lang.Object ref = namespaceName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); namespaceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private void initFields() { namespaceName_ = ""; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasNamespaceName()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, getNamespaceNameBytes()); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, getNamespaceNameBytes()); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest) obj; boolean result = true; result = result && (hasNamespaceName() == other.hasNamespaceName()); if (hasNamespaceName()) { result = result && getNamespaceName() .equals(other.getNamespaceName()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasNamespaceName()) { hash = (37 * hash) + NAMESPACENAME_FIELD_NUMBER; hash = (53 * hash) + getNamespaceName().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest 
parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code ListTableNamesByNamespaceRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListTableNamesByNamespaceRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListTableNamesByNamespaceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest.newBuilder() 
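      // A small sketch of reusing this generated Builder, kept as a comment; it
      // uses only methods defined in this class. clear() resets the field to its
      // default and drops the has-bit, so isInitialized() is false again until
      // namespaceName is re-set. "ns1"/"ns2" are illustrative values.
      //
      //   ListTableNamesByNamespaceRequest.Builder b =
      //       ListTableNamesByNamespaceRequest.newBuilder().setNamespaceName("ns1");
      //   ListTableNamesByNamespaceRequest first = b.build();
      //   ListTableNamesByNamespaceRequest second =
      //       b.clear().setNamespaceName("ns2").build(); // same builder, new message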
private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); namespaceName_ = ""; bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListTableNamesByNamespaceRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.namespaceName_ = namespaceName_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest.getDefaultInstance()) return this; if (other.hasNamespaceName()) { bitField0_ |= 0x00000001; namespaceName_ = other.namespaceName_; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasNamespaceName()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required string namespaceName = 1; private java.lang.Object namespaceName_ = ""; /** * <code>required string namespaceName = 1;</code> */ 
public boolean hasNamespaceName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string namespaceName = 1;</code> */ public java.lang.String getNamespaceName() { java.lang.Object ref = namespaceName_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); namespaceName_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>required string namespaceName = 1;</code> */ public com.google.protobuf.ByteString getNamespaceNameBytes() { java.lang.Object ref = namespaceName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); namespaceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>required string namespaceName = 1;</code> */ public Builder setNamespaceName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; namespaceName_ = value; onChanged(); return this; } /** * <code>required string namespaceName = 1;</code> */ public Builder clearNamespaceName() { bitField0_ = (bitField0_ & ~0x00000001); namespaceName_ = getDefaultInstance().getNamespaceName(); onChanged(); return this; } /** * <code>required string namespaceName = 1;</code> */ public Builder setNamespaceNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; namespaceName_ = value; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:ListTableNamesByNamespaceRequest) } static { defaultInstance = new ListTableNamesByNamespaceRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:ListTableNamesByNamespaceRequest) } public interface ListTableNamesByNamespaceResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { // repeated .TableName tableName = 1; /** * <code>repeated .TableName tableName = 1;</code> */ java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> getTableNameList(); /** * <code>repeated .TableName tableName = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName(int index); /** * <code>repeated .TableName tableName = 1;</code> */ int getTableNameCount(); /** * <code>repeated .TableName tableName = 1;</code> */ java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameOrBuilderList(); /** * <code>repeated .TableName tableName = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder( int index); } /** * Protobuf type {@code ListTableNamesByNamespaceResponse} */ public static final class ListTableNamesByNamespaceResponse extends com.google.protobuf.GeneratedMessage implements ListTableNamesByNamespaceResponseOrBuilder { // Use ListTableNamesByNamespaceResponse.newBuilder() to construct. 
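    // A minimal sketch of consuming this response, kept as a comment. It relies
    // on the repeated-field accessors generated below; addTableName(...) on the
    // Builder is assumed by analogy with the repeated-field builders generated
    // for the other responses in this file.
    //
    //   ListTableNamesByNamespaceResponse resp =
    //       ListTableNamesByNamespaceResponse.parseFrom(wire); // wire: byte[]
    //   for (int i = 0; i < resp.getTableNameCount(); i++) {
    //     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tn =
    //         resp.getTableName(i);
    //     // inspect tn via its generated accessors
    //   }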
private ListTableNamesByNamespaceResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private ListTableNamesByNamespaceResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final ListTableNamesByNamespaceResponse defaultInstance; public static ListTableNamesByNamespaceResponse getDefaultInstance() { return defaultInstance; } public ListTableNamesByNamespaceResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ListTableNamesByNamespaceResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { tableName_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName>(); mutable_bitField0_ |= 0x00000001; } tableName_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry)); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { tableName_ = java.util.Collections.unmodifiableList(tableName_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListTableNamesByNamespaceResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListTableNamesByNamespaceResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.Builder.class); } public static com.google.protobuf.Parser<ListTableNamesByNamespaceResponse> PARSER = new com.google.protobuf.AbstractParser<ListTableNamesByNamespaceResponse>() { public ListTableNamesByNamespaceResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ListTableNamesByNamespaceResponse(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<ListTableNamesByNamespaceResponse> getParserForType() { return PARSER; } // repeated .TableName tableName = 1; public static final int TABLENAME_FIELD_NUMBER = 1; private 
java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> tableName_; /** * <code>repeated .TableName tableName = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> getTableNameList() { return tableName_; } /** * <code>repeated .TableName tableName = 1;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameOrBuilderList() { return tableName_; } /** * <code>repeated .TableName tableName = 1;</code> */ public int getTableNameCount() { return tableName_.size(); } /** * <code>repeated .TableName tableName = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName(int index) { return tableName_.get(index); } /** * <code>repeated .TableName tableName = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder( int index) { return tableName_.get(index); } private void initFields() { tableName_ = java.util.Collections.emptyList(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; for (int i = 0; i < getTableNameCount(); i++) { if (!getTableName(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); for (int i = 0; i < tableName_.size(); i++) { output.writeMessage(1, tableName_.get(i)); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; for (int i = 0; i < tableName_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, tableName_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse) obj; boolean result = true; result = result && getTableNameList() .equals(other.getTableNameList()); result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (getTableNameCount() > 0) { hash = (37 * hash) + TABLENAME_FIELD_NUMBER; hash = (53 * hash) + getTableNameList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code ListTableNamesByNamespaceResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListTableNamesByNamespaceResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListTableNamesByNamespaceResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getTableNameFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (tableNameBuilder_ == null) { tableName_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { tableNameBuilder_.clear(); } return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListTableNamesByNamespaceResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse(this); int from_bitField0_ = bitField0_; if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001)) { tableName_ = java.util.Collections.unmodifiableList(tableName_); bitField0_ = (bitField0_ & ~0x00000001); } result.tableName_ = tableName_; } else { result.tableName_ = tableNameBuilder_.build(); } onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.getDefaultInstance()) return this; if (tableNameBuilder_ == null) { if (!other.tableName_.isEmpty()) { if (tableName_.isEmpty()) { tableName_ = other.tableName_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureTableNameIsMutable(); tableName_.addAll(other.tableName_); } onChanged(); } } else { if (!other.tableName_.isEmpty()) { if (tableNameBuilder_.isEmpty()) { 
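// Commentary (added, not protoc output): when merging into a builder whose
// RepeatedFieldBuilder holds no elements, it is cheaper to discard that
// builder and adopt the other message's immutable list by reference; the
// field builder is then recreated below only when alwaysUseFieldBuilders
// forces eager builder initialization.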
tableNameBuilder_.dispose(); tableNameBuilder_ = null; tableName_ = other.tableName_; bitField0_ = (bitField0_ & ~0x00000001); tableNameBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? getTableNameFieldBuilder() : null; } else { tableNameBuilder_.addAllMessages(other.tableName_); } } } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { for (int i = 0; i < getTableNameCount(); i++) { if (!getTableName(i).isInitialized()) { return false; } } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // repeated .TableName tableName = 1; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> tableName_ = java.util.Collections.emptyList(); private void ensureTableNameIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { tableName_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName>(tableName_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * <code>repeated .TableName tableName = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> getTableNameList() { if (tableNameBuilder_ == null) { return java.util.Collections.unmodifiableList(tableName_); } else { return tableNameBuilder_.getMessageList(); } } /** * <code>repeated .TableName tableName = 1;</code> */ public int getTableNameCount() { if (tableNameBuilder_ == null) { return tableName_.size(); } else { return tableNameBuilder_.getCount(); } } /** * <code>repeated .TableName tableName = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName(int index) { if (tableNameBuilder_ == null) { return tableName_.get(index); } else { return tableNameBuilder_.getMessage(index); } } /** * <code>repeated .TableName tableName = 1;</code> */ public Builder setTableName( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTableNameIsMutable(); tableName_.set(index, value); onChanged(); } else { tableNameBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .TableName tableName = 1;</code> */ public Builder setTableName( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) { if (tableNameBuilder_ == null) { ensureTableNameIsMutable(); tableName_.set(index, builderForValue.build()); onChanged(); } else { tableNameBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .TableName tableName = 1;</code> */ public 
Builder addTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTableNameIsMutable(); tableName_.add(value); onChanged(); } else { tableNameBuilder_.addMessage(value); } return this; } /** * <code>repeated .TableName tableName = 1;</code> */ public Builder addTableName( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTableNameIsMutable(); tableName_.add(index, value); onChanged(); } else { tableNameBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .TableName tableName = 1;</code> */ public Builder addTableName( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) { if (tableNameBuilder_ == null) { ensureTableNameIsMutable(); tableName_.add(builderForValue.build()); onChanged(); } else { tableNameBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .TableName tableName = 1;</code> */ public Builder addTableName( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) { if (tableNameBuilder_ == null) { ensureTableNameIsMutable(); tableName_.add(index, builderForValue.build()); onChanged(); } else { tableNameBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .TableName tableName = 1;</code> */ public Builder addAllTableName( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> values) { if (tableNameBuilder_ == null) { ensureTableNameIsMutable(); super.addAll(values, tableName_); onChanged(); } else { tableNameBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .TableName tableName = 1;</code> */ public Builder clearTableName() { if (tableNameBuilder_ == null) { tableName_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { tableNameBuilder_.clear(); } return this; } /** * <code>repeated .TableName tableName = 1;</code> */ public Builder removeTableName(int index) { if (tableNameBuilder_ == null) { ensureTableNameIsMutable(); tableName_.remove(index); onChanged(); } else { tableNameBuilder_.remove(index); } return this; } /** * <code>repeated .TableName tableName = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder( int index) { return getTableNameFieldBuilder().getBuilder(index); } /** * <code>repeated .TableName tableName = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder( int index) { if (tableNameBuilder_ == null) { return tableName_.get(index); } else { return tableNameBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .TableName tableName = 1;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameOrBuilderList() { if (tableNameBuilder_ != null) { return tableNameBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(tableName_); } } /** * <code>repeated .TableName tableName = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder addTableNameBuilder() { return getTableNameFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()); } /** * <code>repeated .TableName tableName = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder addTableNameBuilder( int index) { return getTableNameFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()); } /** * <code>repeated .TableName tableName = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder> getTableNameBuilderList() { return getTableNameFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { tableNameBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>( tableName_, ((bitField0_ & 0x00000001) == 0x00000001), getParentForChildren(), isClean()); tableName_ = null; } return tableNameBuilder_; } // @@protoc_insertion_point(builder_scope:ListTableNamesByNamespaceResponse) } static { defaultInstance = new ListTableNamesByNamespaceResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:ListTableNamesByNamespaceResponse) } public interface ShutdownRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code ShutdownRequest} */ public static final class ShutdownRequest extends com.google.protobuf.GeneratedMessage implements ShutdownRequestOrBuilder { // Use ShutdownRequest.newBuilder() to construct. 
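// Illustrative note (added commentary, not protoc output): ShutdownRequest
// declares no fields, so a fresh instance carries no payload and serializes
// to zero bytes (plus any unknown fields). A minimal construction sketch:
//
//   ShutdownRequest req = ShutdownRequest.newBuilder().build();
//   // the shared default instance is equivalent and avoids an allocation:
//   ShutdownRequest req2 = ShutdownRequest.getDefaultInstance();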
private ShutdownRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private ShutdownRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final ShutdownRequest defaultInstance; public static ShutdownRequest getDefaultInstance() { return defaultInstance; } public ShutdownRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ShutdownRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ShutdownRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ShutdownRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest.Builder.class); } public static com.google.protobuf.Parser<ShutdownRequest> PARSER = new com.google.protobuf.AbstractParser<ShutdownRequest>() { public ShutdownRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ShutdownRequest(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<ShutdownRequest> getParserForType() { return PARSER; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == 
this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder 
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code ShutdownRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ShutdownRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ShutdownRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ShutdownRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } // @@protoc_insertion_point(builder_scope:ShutdownRequest) } static { defaultInstance = new ShutdownRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:ShutdownRequest) } public interface ShutdownResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code ShutdownResponse} */ public static final class ShutdownResponse extends com.google.protobuf.GeneratedMessage implements ShutdownResponseOrBuilder { // Use ShutdownResponse.newBuilder() to construct. private ShutdownResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private ShutdownResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final ShutdownResponse defaultInstance; public static ShutdownResponse getDefaultInstance() { return defaultInstance; } public ShutdownResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ShutdownResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ShutdownResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ShutdownResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse.Builder.class); } public static com.google.protobuf.Parser<ShutdownResponse> PARSER = new com.google.protobuf.AbstractParser<ShutdownResponse>() { public ShutdownResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ShutdownResponse(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<ShutdownResponse> getParserForType() { return PARSER; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean 
isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code ShutdownResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ShutdownResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ShutdownResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ShutdownResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } // @@protoc_insertion_point(builder_scope:ShutdownResponse) } static { defaultInstance = new ShutdownResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:ShutdownResponse) } public interface StopMasterRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code StopMasterRequest} */ public static final class StopMasterRequest extends com.google.protobuf.GeneratedMessage implements StopMasterRequestOrBuilder { // Use StopMasterRequest.newBuilder() to construct. private StopMasterRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private StopMasterRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final StopMasterRequest defaultInstance; public static StopMasterRequest getDefaultInstance() { return defaultInstance; } public StopMasterRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private StopMasterRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_StopMasterRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { 
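// Commentary (added, not protoc output): binds StopMasterRequest and its
// Builder to the generated descriptor so the reflective GeneratedMessage
// machinery can resolve fields by number; for this field-less message the
// table is effectively empty.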
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_StopMasterRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest.Builder.class); } public static com.google.protobuf.Parser<StopMasterRequest> PARSER = new com.google.protobuf.AbstractParser<StopMasterRequest>() { public StopMasterRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new StopMasterRequest(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<StopMasterRequest> getParserForType() { return PARSER; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code StopMasterRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_StopMasterRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_StopMasterRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_StopMasterRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } // @@protoc_insertion_point(builder_scope:StopMasterRequest) } static { defaultInstance = new StopMasterRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:StopMasterRequest) } public interface StopMasterResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code StopMasterResponse} */ public static final class StopMasterResponse extends com.google.protobuf.GeneratedMessage implements StopMasterResponseOrBuilder { // Use StopMasterResponse.newBuilder() to construct. 
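// Illustrative parse sketch (added commentary, not protoc output; the source
// of replyBytes is assumed): a field-less response still round-trips through
// PARSER, preserving unknown fields set by a newer server.
//
//   StopMasterResponse resp = StopMasterResponse.parseFrom(replyBytes);
//   boolean ok = resp.isInitialized();  // always true: no required fields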
private StopMasterResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private StopMasterResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final StopMasterResponse defaultInstance; public static StopMasterResponse getDefaultInstance() { return defaultInstance; } public StopMasterResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private StopMasterResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_StopMasterResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_StopMasterResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse.Builder.class); } public static com.google.protobuf.Parser<StopMasterResponse> PARSER = new com.google.protobuf.AbstractParser<StopMasterResponse>() { public StopMasterResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new StopMasterResponse(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<StopMasterResponse> getParserForType() { return PARSER; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean 
equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse prototype) { return newBuilder().mergeFrom(prototype); } public 
Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code StopMasterResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_StopMasterResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_StopMasterResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_StopMasterResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } // @@protoc_insertion_point(builder_scope:StopMasterResponse) } static { defaultInstance = new StopMasterResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:StopMasterResponse) } public interface BalanceRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code BalanceRequest} */ public static final class BalanceRequest extends com.google.protobuf.GeneratedMessage implements BalanceRequestOrBuilder { // Use BalanceRequest.newBuilder() to construct. private BalanceRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private BalanceRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final BalanceRequest defaultInstance; public static BalanceRequest getDefaultInstance() { return defaultInstance; } public BalanceRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private BalanceRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_BalanceRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_BalanceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest.Builder.class); } public static com.google.protobuf.Parser<BalanceRequest> PARSER = new com.google.protobuf.AbstractParser<BalanceRequest>() { public BalanceRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new BalanceRequest(input, extensionRegistry); } }; @java.lang.Override public 
com.google.protobuf.Parser<BalanceRequest> getParserForType() { return PARSER; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code BalanceRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_BalanceRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_BalanceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_BalanceRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest(this); onBuilt(); return result; } public Builder 
mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } // @@protoc_insertion_point(builder_scope:BalanceRequest) } static { defaultInstance = new BalanceRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:BalanceRequest) } public interface BalanceResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { // required bool balancer_ran = 1; /** * <code>required bool balancer_ran = 1;</code> */ boolean hasBalancerRan(); /** * <code>required bool balancer_ran = 1;</code> */ boolean getBalancerRan(); } /** * Protobuf type {@code BalanceResponse} */ public static final class BalanceResponse extends com.google.protobuf.GeneratedMessage implements BalanceResponseOrBuilder { // Use BalanceResponse.newBuilder() to construct. 
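// ---------------------------------------------------------------------------
// Editor's note: the method below is an illustrative addition, NOT protoc
// output. It demonstrates the contract of the required `balancer_ran` field on
// this message: build() enforces presence, throwing an unchecked
// UninitializedMessageException when the field was never set, while
// buildPartial() would skip that check. The method name exampleBalanceResponse
// is ours; everything else is generated API.
private static BalanceResponse exampleBalanceResponse(boolean ran) {
  // setBalancerRan(...) flips the presence bit that isInitialized() checks,
  // so the build() call below cannot throw.
  return BalanceResponse.newBuilder().setBalancerRan(ran).build();
}
// ---------------------------------------------------------------------------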
private BalanceResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private BalanceResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final BalanceResponse defaultInstance; public static BalanceResponse getDefaultInstance() { return defaultInstance; } public BalanceResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private BalanceResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { bitField0_ |= 0x00000001; balancerRan_ = input.readBool(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_BalanceResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_BalanceResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse.Builder.class); } public static com.google.protobuf.Parser<BalanceResponse> PARSER = new com.google.protobuf.AbstractParser<BalanceResponse>() { public BalanceResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new BalanceResponse(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<BalanceResponse> getParserForType() { return PARSER; } private int bitField0_; // required bool balancer_ran = 1; public static final int BALANCER_RAN_FIELD_NUMBER = 1; private boolean balancerRan_; /** * <code>required bool balancer_ran = 1;</code> */ public boolean hasBalancerRan() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bool balancer_ran = 1;</code> */ public boolean getBalancerRan() { return balancerRan_; } private void initFields() { balancerRan_ = false; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasBalancerRan()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) 
throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, balancerRan_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(1, balancerRan_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse) obj; boolean result = true; result = result && (hasBalancerRan() == other.hasBalancerRan()); if (hasBalancerRan()) { result = result && (getBalancerRan() == other.getBalancerRan()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasBalancerRan()) { hash = (37 * hash) + BALANCER_RAN_FIELD_NUMBER; hash = (53 * hash) + hashBoolean(getBalancerRan()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code BalanceResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_BalanceResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_BalanceResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); balancerRan_ = false; bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_BalanceResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse buildPartial() { 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.balancerRan_ = balancerRan_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse.getDefaultInstance()) return this; if (other.hasBalancerRan()) { setBalancerRan(other.getBalancerRan()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasBalancerRan()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required bool balancer_ran = 1; private boolean balancerRan_ ; /** * <code>required bool balancer_ran = 1;</code> */ public boolean hasBalancerRan() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bool balancer_ran = 1;</code> */ public boolean getBalancerRan() { return balancerRan_; } /** * <code>required bool balancer_ran = 1;</code> */ public Builder setBalancerRan(boolean value) { bitField0_ |= 0x00000001; balancerRan_ = value; onChanged(); return this; } /** * <code>required bool balancer_ran = 1;</code> */ public Builder clearBalancerRan() { bitField0_ = (bitField0_ & ~0x00000001); balancerRan_ = false; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:BalanceResponse) } static { defaultInstance = new BalanceResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:BalanceResponse) } public interface SetBalancerRunningRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required bool on = 1; /** * <code>required bool on = 1;</code> */ boolean hasOn(); /** * <code>required bool on = 1;</code> */ boolean getOn(); // optional bool synchronous = 2; /** * <code>optional bool synchronous = 2;</code> */ boolean hasSynchronous(); /** * <code>optional bool synchronous = 2;</code> */ boolean getSynchronous(); } /** * Protobuf type {@code SetBalancerRunningRequest} */ public static final class SetBalancerRunningRequest extends com.google.protobuf.GeneratedMessage implements SetBalancerRunningRequestOrBuilder { // Use SetBalancerRunningRequest.newBuilder() to construct. 
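// ---------------------------------------------------------------------------
// Editor's note: the method below is an illustrative addition, NOT protoc
// output. `on` is required while `synchronous` is optional, so callers can use
// hasSynchronous() to tell an explicit false apart from an unset field.
// Reading `synchronous` as "block until the balancer has actually settled" is
// our interpretation of the field name; the generated code carries no
// documentation for it. The method name exampleDisableBalancer is ours;
// everything else is generated API.
private static SetBalancerRunningRequest exampleDisableBalancer(boolean waitForIt) {
  Builder b = newBuilder().setOn(false);  // required bool on = 1
  if (waitForIt) {
    b.setSynchronous(true);               // optional bool synchronous = 2
  }
  return b.build();                       // would throw if setOn(...) were omitted
}
// ---------------------------------------------------------------------------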
private SetBalancerRunningRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private SetBalancerRunningRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final SetBalancerRunningRequest defaultInstance; public static SetBalancerRunningRequest getDefaultInstance() { return defaultInstance; } public SetBalancerRunningRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private SetBalancerRunningRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { bitField0_ |= 0x00000001; on_ = input.readBool(); break; } case 16: { bitField0_ |= 0x00000002; synchronous_ = input.readBool(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SetBalancerRunningRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SetBalancerRunningRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest.Builder.class); } public static com.google.protobuf.Parser<SetBalancerRunningRequest> PARSER = new com.google.protobuf.AbstractParser<SetBalancerRunningRequest>() { public SetBalancerRunningRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new SetBalancerRunningRequest(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<SetBalancerRunningRequest> getParserForType() { return PARSER; } private int bitField0_; // required bool on = 1; public static final int ON_FIELD_NUMBER = 1; private boolean on_; /** * <code>required bool on = 1;</code> */ public boolean hasOn() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bool on = 1;</code> */ public boolean getOn() { return on_; } // optional bool synchronous = 2; public static final int SYNCHRONOUS_FIELD_NUMBER = 2; private boolean synchronous_; /** * <code>optional bool synchronous = 2;</code> */ public boolean hasSynchronous() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * 
<code>optional bool synchronous = 2;</code> */ public boolean getSynchronous() { return synchronous_; } private void initFields() { on_ = false; synchronous_ = false; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasOn()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, on_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBool(2, synchronous_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(1, on_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(2, synchronous_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest) obj; boolean result = true; result = result && (hasOn() == other.hasOn()); if (hasOn()) { result = result && (getOn() == other.getOn()); } result = result && (hasSynchronous() == other.hasSynchronous()); if (hasSynchronous()) { result = result && (getSynchronous() == other.getSynchronous()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasOn()) { hash = (37 * hash) + ON_FIELD_NUMBER; hash = (53 * hash) + hashBoolean(getOn()); } if (hasSynchronous()) { hash = (37 * hash) + SYNCHRONOUS_FIELD_NUMBER; hash = (53 * hash) + hashBoolean(getSynchronous()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code SetBalancerRunningRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SetBalancerRunningRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SetBalancerRunningRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if 
(com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); on_ = false; bitField0_ = (bitField0_ & ~0x00000001); synchronous_ = false; bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SetBalancerRunningRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.on_ = on_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.synchronous_ = synchronous_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest.getDefaultInstance()) return this; if (other.hasOn()) { setOn(other.getOn()); } if (other.hasSynchronous()) { setSynchronous(other.getSynchronous()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasOn()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required bool on = 1; private boolean on_ ; /** * <code>required bool on = 1;</code> */ public boolean hasOn() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bool on = 1;</code> */ public boolean getOn() { return on_; } /** * <code>required bool on = 1;</code> */ public Builder setOn(boolean value) { 
bitField0_ |= 0x00000001; on_ = value; onChanged(); return this; } /** * <code>required bool on = 1;</code> */ public Builder clearOn() { bitField0_ = (bitField0_ & ~0x00000001); on_ = false; onChanged(); return this; } // optional bool synchronous = 2; private boolean synchronous_ ; /** * <code>optional bool synchronous = 2;</code> */ public boolean hasSynchronous() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional bool synchronous = 2;</code> */ public boolean getSynchronous() { return synchronous_; } /** * <code>optional bool synchronous = 2;</code> */ public Builder setSynchronous(boolean value) { bitField0_ |= 0x00000002; synchronous_ = value; onChanged(); return this; } /** * <code>optional bool synchronous = 2;</code> */ public Builder clearSynchronous() { bitField0_ = (bitField0_ & ~0x00000002); synchronous_ = false; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:SetBalancerRunningRequest) } static { defaultInstance = new SetBalancerRunningRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:SetBalancerRunningRequest) } public interface SetBalancerRunningResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { // optional bool prev_balance_value = 1; /** * <code>optional bool prev_balance_value = 1;</code> */ boolean hasPrevBalanceValue(); /** * <code>optional bool prev_balance_value = 1;</code> */ boolean getPrevBalanceValue(); } /** * Protobuf type {@code SetBalancerRunningResponse} */ public static final class SetBalancerRunningResponse extends com.google.protobuf.GeneratedMessage implements SetBalancerRunningResponseOrBuilder { // Use SetBalancerRunningResponse.newBuilder() to construct. private SetBalancerRunningResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private SetBalancerRunningResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final SetBalancerRunningResponse defaultInstance; public static SetBalancerRunningResponse getDefaultInstance() { return defaultInstance; } public SetBalancerRunningResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private SetBalancerRunningResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { bitField0_ |= 0x00000001; prevBalanceValue_ = input.readBool(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SetBalancerRunningResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SetBalancerRunningResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse.Builder.class); } public static com.google.protobuf.Parser<SetBalancerRunningResponse> PARSER = new com.google.protobuf.AbstractParser<SetBalancerRunningResponse>() { public SetBalancerRunningResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new SetBalancerRunningResponse(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<SetBalancerRunningResponse> getParserForType() { return PARSER; } private int bitField0_; // optional bool prev_balance_value = 1; public static final int PREV_BALANCE_VALUE_FIELD_NUMBER = 1; private boolean prevBalanceValue_; /** * <code>optional bool prev_balance_value = 1;</code> */ public boolean hasPrevBalanceValue() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional bool prev_balance_value = 1;</code> */ public boolean getPrevBalanceValue() { return prevBalanceValue_; } private void initFields() { prevBalanceValue_ = false; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, prevBalanceValue_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(1, prevBalanceValue_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse) obj; boolean result = true; result = result && (hasPrevBalanceValue() == other.hasPrevBalanceValue()); if (hasPrevBalanceValue()) { result = result && (getPrevBalanceValue() == other.getPrevBalanceValue()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + 
getDescriptorForType().hashCode(); if (hasPrevBalanceValue()) { hash = (37 * hash) + PREV_BALANCE_VALUE_FIELD_NUMBER; hash = (53 * hash) + hashBoolean(getPrevBalanceValue()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code SetBalancerRunningResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements 
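/*
 * Round-trip sketch (illustrative, not part of the generated output):
 * the parseFrom overloads above accept ByteString, byte[], InputStream
 * or CodedInputStream; serializing and re-parsing yields an equal
 * message, and hashCode() is memoized after its first call.
 *
 *   SetBalancerRunningResponse resp = SetBalancerRunningResponse.newBuilder()
 *       .setPrevBalanceValue(true)
 *       .build();
 *   SetBalancerRunningResponse copy =
 *       SetBalancerRunningResponse.parseFrom(resp.toByteString());
 *   assert resp.equals(copy) && resp.hashCode() == copy.hashCode();
 */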
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SetBalancerRunningResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SetBalancerRunningResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); prevBalanceValue_ = false; bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SetBalancerRunningResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.prevBalanceValue_ = prevBalanceValue_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse.getDefaultInstance()) return this; if (other.hasPrevBalanceValue()) { setPrevBalanceValue(other.getPrevBalanceValue()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, 
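/*
 * build() vs buildPartial() sketch (illustrative, not part of the
 * generated output): build() checks isInitialized() and throws an
 * unchecked UninitializedMessageException when required fields are
 * missing, while buildPartial() skips that check. This message has no
 * required fields, so both succeed even on an empty builder.
 *
 *   SetBalancerRunningResponse partial =
 *       SetBalancerRunningResponse.newBuilder().buildPartial();
 */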
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // optional bool prev_balance_value = 1; private boolean prevBalanceValue_ ; /** * <code>optional bool prev_balance_value = 1;</code> */ public boolean hasPrevBalanceValue() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional bool prev_balance_value = 1;</code> */ public boolean getPrevBalanceValue() { return prevBalanceValue_; } /** * <code>optional bool prev_balance_value = 1;</code> */ public Builder setPrevBalanceValue(boolean value) { bitField0_ |= 0x00000001; prevBalanceValue_ = value; onChanged(); return this; } /** * <code>optional bool prev_balance_value = 1;</code> */ public Builder clearPrevBalanceValue() { bitField0_ = (bitField0_ & ~0x00000001); prevBalanceValue_ = false; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:SetBalancerRunningResponse) } static { defaultInstance = new SetBalancerRunningResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:SetBalancerRunningResponse) } public interface RunCatalogScanRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code RunCatalogScanRequest} */ public static final class RunCatalogScanRequest extends com.google.protobuf.GeneratedMessage implements RunCatalogScanRequestOrBuilder { // Use RunCatalogScanRequest.newBuilder() to construct. 
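/*
 * Merge sketch (illustrative, not part of the generated output):
 * mergeFrom(other) copies only fields that are set on `other`, and
 * toBuilder() starts a builder pre-populated from an existing message,
 * so merging in a default instance leaves existing values untouched.
 *
 *   SetBalancerRunningResponse base = SetBalancerRunningResponse.newBuilder()
 *       .setPrevBalanceValue(true).build();
 *   SetBalancerRunningResponse merged = base.toBuilder()
 *       .mergeFrom(SetBalancerRunningResponse.getDefaultInstance())
 *       .build();
 *   assert merged.getPrevBalanceValue();
 */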
private RunCatalogScanRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private RunCatalogScanRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final RunCatalogScanRequest defaultInstance; public static RunCatalogScanRequest getDefaultInstance() { return defaultInstance; } public RunCatalogScanRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private RunCatalogScanRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_RunCatalogScanRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_RunCatalogScanRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest.Builder.class); } public static com.google.protobuf.Parser<RunCatalogScanRequest> PARSER = new com.google.protobuf.AbstractParser<RunCatalogScanRequest>() { public RunCatalogScanRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new RunCatalogScanRequest(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<RunCatalogScanRequest> getParserForType() { return PARSER; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return 
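/*
 * Usage sketch (illustrative, not part of the generated output):
 * RunCatalogScanRequest declares no fields, so the shared default
 * instance is all a caller normally needs; its serialized size is zero
 * apart from any unknown fields carried along.
 *
 *   RunCatalogScanRequest req = RunCatalogScanRequest.getDefaultInstance();
 *   assert req.getSerializedSize() == 0;
 */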
super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code RunCatalogScanRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_RunCatalogScanRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_RunCatalogScanRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_RunCatalogScanRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } 
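/*
 * Streaming sketch (illustrative, not part of the generated output):
 * parseDelimitedFrom pairs with MessageLite.writeDelimitedTo to move
 * several length-prefixed messages over one stream, returning null at
 * end of stream.
 *
 *   java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
 *   RunCatalogScanRequest.getDefaultInstance().writeDelimitedTo(out);
 *   java.io.ByteArrayInputStream in =
 *       new java.io.ByteArrayInputStream(out.toByteArray());
 *   RunCatalogScanRequest first = RunCatalogScanRequest.parseDelimitedFrom(in);
 *   assert first != null && RunCatalogScanRequest.parseDelimitedFrom(in) == null;
 */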
public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } // @@protoc_insertion_point(builder_scope:RunCatalogScanRequest) } static { defaultInstance = new RunCatalogScanRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:RunCatalogScanRequest) } public interface RunCatalogScanResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { // optional int32 scan_result = 1; /** * <code>optional int32 scan_result = 1;</code> */ boolean hasScanResult(); /** * <code>optional int32 scan_result = 1;</code> */ int getScanResult(); } /** * Protobuf type {@code RunCatalogScanResponse} */ public static final class RunCatalogScanResponse extends com.google.protobuf.GeneratedMessage implements RunCatalogScanResponseOrBuilder { // Use RunCatalogScanResponse.newBuilder() to construct. private RunCatalogScanResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private RunCatalogScanResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final RunCatalogScanResponse defaultInstance; public static RunCatalogScanResponse getDefaultInstance() { return defaultInstance; } public RunCatalogScanResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private RunCatalogScanResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { bitField0_ |= 0x00000001; scanResult_ = input.readInt32(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_RunCatalogScanResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_RunCatalogScanResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse.Builder.class); } public static com.google.protobuf.Parser<RunCatalogScanResponse> PARSER = new com.google.protobuf.AbstractParser<RunCatalogScanResponse>() { public RunCatalogScanResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new RunCatalogScanResponse(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<RunCatalogScanResponse> getParserForType() { return PARSER; } private int bitField0_; // optional int32 scan_result = 1; public static final int SCAN_RESULT_FIELD_NUMBER = 1; private int scanResult_; /** * <code>optional int32 scan_result = 1;</code> */ public boolean hasScanResult() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional int32 scan_result = 1;</code> */ public int getScanResult() { return scanResult_; } private void initFields() { scanResult_ = 0; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeInt32(1, scanResult_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeInt32Size(1, scanResult_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse) obj; boolean result = true; result = result && (hasScanResult() == other.hasScanResult()); if (hasScanResult()) { result = result && (getScanResult() == other.getScanResult()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasScanResult()) { hash = (37 * hash) + SCAN_RESULT_FIELD_NUMBER; hash = (53 * hash) + getScanResult(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
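/*
 * Accessor sketch (illustrative, not part of the generated output):
 * scan_result is an optional int32, so getScanResult() returns 0 both
 * when the field was never set and when it was explicitly set to 0;
 * call hasScanResult() to distinguish the two.
 *
 *   RunCatalogScanResponse resp = RunCatalogScanResponse.newBuilder()
 *       .setScanResult(5)
 *       .build();
 *   int result = resp.hasScanResult() ? resp.getScanResult() : 0;
 */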
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code RunCatalogScanResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_RunCatalogScanResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_RunCatalogScanResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); scanResult_ = 0; bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_RunCatalogScanResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.scanResult_ = scanResult_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse.getDefaultInstance()) return this; if (other.hasScanResult()) { setScanResult(other.getScanResult()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // optional int32 scan_result = 1; private int scanResult_ ; /** * <code>optional int32 
scan_result = 1;</code> */ public boolean hasScanResult() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional int32 scan_result = 1;</code> */ public int getScanResult() { return scanResult_; } /** * <code>optional int32 scan_result = 1;</code> */ public Builder setScanResult(int value) { bitField0_ |= 0x00000001; scanResult_ = value; onChanged(); return this; } /** * <code>optional int32 scan_result = 1;</code> */ public Builder clearScanResult() { bitField0_ = (bitField0_ & ~0x00000001); scanResult_ = 0; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:RunCatalogScanResponse) } static { defaultInstance = new RunCatalogScanResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:RunCatalogScanResponse) } public interface EnableCatalogJanitorRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required bool enable = 1; /** * <code>required bool enable = 1;</code> */ boolean hasEnable(); /** * <code>required bool enable = 1;</code> */ boolean getEnable(); } /** * Protobuf type {@code EnableCatalogJanitorRequest} */ public static final class EnableCatalogJanitorRequest extends com.google.protobuf.GeneratedMessage implements EnableCatalogJanitorRequestOrBuilder { // Use EnableCatalogJanitorRequest.newBuilder() to construct. private EnableCatalogJanitorRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private EnableCatalogJanitorRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final EnableCatalogJanitorRequest defaultInstance; public static EnableCatalogJanitorRequest getDefaultInstance() { return defaultInstance; } public EnableCatalogJanitorRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private EnableCatalogJanitorRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { bitField0_ |= 0x00000001; enable_ = input.readBool(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableCatalogJanitorRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableCatalogJanitorRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest.Builder.class); } public static com.google.protobuf.Parser<EnableCatalogJanitorRequest> PARSER = new com.google.protobuf.AbstractParser<EnableCatalogJanitorRequest>() { public EnableCatalogJanitorRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new EnableCatalogJanitorRequest(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<EnableCatalogJanitorRequest> getParserForType() { return PARSER; } private int bitField0_; // required bool enable = 1; public static final int ENABLE_FIELD_NUMBER = 1; private boolean enable_; /** * <code>required bool enable = 1;</code> */ public boolean hasEnable() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bool enable = 1;</code> */ public boolean getEnable() { return enable_; } private void initFields() { enable_ = false; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasEnable()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, enable_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(1, enable_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest) obj; boolean result = true; result = result && (hasEnable() == other.hasEnable()); if (hasEnable()) { result = result && (getEnable() == other.getEnable()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasEnable()) { hash = (37 * hash) + ENABLE_FIELD_NUMBER; hash = (53 * hash) + hashBoolean(getEnable()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
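/*
 * Required-field sketch (illustrative, not part of the generated
 * output): `enable` is required, so isInitialized() stays false until
 * it is set, build() throws for an incomplete message, and parsing
 * bytes that omit field 1 fails with InvalidProtocolBufferException.
 *
 *   EnableCatalogJanitorRequest.Builder b = EnableCatalogJanitorRequest.newBuilder();
 *   assert !b.isInitialized();
 *   EnableCatalogJanitorRequest req = b.setEnable(true).build(); // now valid
 */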
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code EnableCatalogJanitorRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableCatalogJanitorRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableCatalogJanitorRequest_fieldAccessorTable 
.ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); enable_ = false; bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableCatalogJanitorRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.enable_ = enable_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest.getDefaultInstance()) return this; if (other.hasEnable()) { setEnable(other.getEnable()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasEnable()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { 
mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required bool enable = 1; private boolean enable_ ; /** * <code>required bool enable = 1;</code> */ public boolean hasEnable() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bool enable = 1;</code> */ public boolean getEnable() { return enable_; } /** * <code>required bool enable = 1;</code> */ public Builder setEnable(boolean value) { bitField0_ |= 0x00000001; enable_ = value; onChanged(); return this; } /** * <code>required bool enable = 1;</code> */ public Builder clearEnable() { bitField0_ = (bitField0_ & ~0x00000001); enable_ = false; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:EnableCatalogJanitorRequest) } static { defaultInstance = new EnableCatalogJanitorRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:EnableCatalogJanitorRequest) } public interface EnableCatalogJanitorResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { // optional bool prev_value = 1; /** * <code>optional bool prev_value = 1;</code> */ boolean hasPrevValue(); /** * <code>optional bool prev_value = 1;</code> */ boolean getPrevValue(); } /** * Protobuf type {@code EnableCatalogJanitorResponse} */ public static final class EnableCatalogJanitorResponse extends com.google.protobuf.GeneratedMessage implements EnableCatalogJanitorResponseOrBuilder { // Use EnableCatalogJanitorResponse.newBuilder() to construct. private EnableCatalogJanitorResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private EnableCatalogJanitorResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final EnableCatalogJanitorResponse defaultInstance; public static EnableCatalogJanitorResponse getDefaultInstance() { return defaultInstance; } public EnableCatalogJanitorResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private EnableCatalogJanitorResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { bitField0_ |= 0x00000001; prevValue_ = input.readBool(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableCatalogJanitorResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableCatalogJanitorResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse.Builder.class); } public static com.google.protobuf.Parser<EnableCatalogJanitorResponse> PARSER = new com.google.protobuf.AbstractParser<EnableCatalogJanitorResponse>() { public EnableCatalogJanitorResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new EnableCatalogJanitorResponse(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<EnableCatalogJanitorResponse> getParserForType() { return PARSER; } private int bitField0_; // optional bool prev_value = 1; public static final int PREV_VALUE_FIELD_NUMBER = 1; private boolean prevValue_; /** * <code>optional bool prev_value = 1;</code> */ public boolean hasPrevValue() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional bool prev_value = 1;</code> */ public boolean getPrevValue() { return prevValue_; } private void initFields() { prevValue_ = false; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, prevValue_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(1, prevValue_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse) obj; boolean result = true; result = result && (hasPrevValue() == other.hasPrevValue()); if (hasPrevValue()) { result = result && (getPrevValue() == other.getPrevValue()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasPrevValue()) { hash = (37 * hash) + PREV_VALUE_FIELD_NUMBER; hash = (53 * hash) + hashBoolean(getPrevValue()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static 
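/*
 * Response sketch (illustrative, not part of the generated output):
 * prev_value reports the janitor's previous on/off state, which lets a
 * caller restore it afterwards; `resp` is a previously obtained
 * EnableCatalogJanitorResponse.
 *
 *   boolean before = resp.hasPrevValue() && resp.getPrevValue();
 *   EnableCatalogJanitorRequest restore =
 *       EnableCatalogJanitorRequest.newBuilder().setEnable(before).build();
 */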
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code EnableCatalogJanitorResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableCatalogJanitorResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableCatalogJanitorResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); prevValue_ = false; bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableCatalogJanitorResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.prevValue_ = prevValue_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse.getDefaultInstance()) return this; if (other.hasPrevValue()) { setPrevValue(other.getPrevValue()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse parsedMessage = 
null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // optional bool prev_value = 1; private boolean prevValue_ ; /** * <code>optional bool prev_value = 1;</code> */ public boolean hasPrevValue() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional bool prev_value = 1;</code> */ public boolean getPrevValue() { return prevValue_; } /** * <code>optional bool prev_value = 1;</code> */ public Builder setPrevValue(boolean value) { bitField0_ |= 0x00000001; prevValue_ = value; onChanged(); return this; } /** * <code>optional bool prev_value = 1;</code> */ public Builder clearPrevValue() { bitField0_ = (bitField0_ & ~0x00000001); prevValue_ = false; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:EnableCatalogJanitorResponse) } static { defaultInstance = new EnableCatalogJanitorResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:EnableCatalogJanitorResponse) } public interface IsCatalogJanitorEnabledRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code IsCatalogJanitorEnabledRequest} */ public static final class IsCatalogJanitorEnabledRequest extends com.google.protobuf.GeneratedMessage implements IsCatalogJanitorEnabledRequestOrBuilder { // Use IsCatalogJanitorEnabledRequest.newBuilder() to construct. private IsCatalogJanitorEnabledRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private IsCatalogJanitorEnabledRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final IsCatalogJanitorEnabledRequest defaultInstance; public static IsCatalogJanitorEnabledRequest getDefaultInstance() { return defaultInstance; } public IsCatalogJanitorEnabledRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private IsCatalogJanitorEnabledRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsCatalogJanitorEnabledRequest_descriptor; } protected 
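/*
 * Illustrative only, not protoc output: reading the optional prev_value
 * field of EnableCatalogJanitorResponse (defined just above). Because the
 * field is optional, a careful caller checks hasPrevValue() before
 * trusting getPrevValue(), which otherwise just returns the type default
 * (false).
 *
 *   EnableCatalogJanitorResponse resp =
 *       EnableCatalogJanitorResponse.newBuilder().setPrevValue(true).build();
 *   boolean wasEnabled = resp.hasPrevValue() && resp.getPrevValue();
 */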
com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsCatalogJanitorEnabledRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest.Builder.class); } public static com.google.protobuf.Parser<IsCatalogJanitorEnabledRequest> PARSER = new com.google.protobuf.AbstractParser<IsCatalogJanitorEnabledRequest>() { public IsCatalogJanitorEnabledRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new IsCatalogJanitorEnabledRequest(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<IsCatalogJanitorEnabledRequest> getParserForType() { return PARSER; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code IsCatalogJanitorEnabledRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsCatalogJanitorEnabledRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsCatalogJanitorEnabledRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); 
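/*
 * Illustrative only: IsCatalogJanitorEnabledRequest has no fields, so a
 * wire-format round trip is trivial. The same pattern applies to every
 * message in this file: toByteArray() to serialize, the static
 * parseFrom(...) overloads (all of which delegate to PARSER) to
 * deserialize, and writeDelimitedTo(...)/parseDelimitedFrom(...) for
 * streams carrying several length-prefixed messages.
 *
 *   IsCatalogJanitorEnabledRequest req =
 *       IsCatalogJanitorEnabledRequest.newBuilder().build();
 *   byte[] wire = req.toByteArray();
 *   IsCatalogJanitorEnabledRequest copy =
 *       IsCatalogJanitorEnabledRequest.parseFrom(wire);
 */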
maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsCatalogJanitorEnabledRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } // @@protoc_insertion_point(builder_scope:IsCatalogJanitorEnabledRequest) } static { defaultInstance = new IsCatalogJanitorEnabledRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:IsCatalogJanitorEnabledRequest) } public interface IsCatalogJanitorEnabledResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { // required bool value = 1; /** * <code>required bool value = 1;</code> */ boolean hasValue(); /** * <code>required bool value = 1;</code> */ boolean getValue(); } /** * Protobuf type {@code IsCatalogJanitorEnabledResponse} */ public static final class IsCatalogJanitorEnabledResponse extends com.google.protobuf.GeneratedMessage implements IsCatalogJanitorEnabledResponseOrBuilder { // Use 
IsCatalogJanitorEnabledResponse.newBuilder() to construct. private IsCatalogJanitorEnabledResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private IsCatalogJanitorEnabledResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final IsCatalogJanitorEnabledResponse defaultInstance; public static IsCatalogJanitorEnabledResponse getDefaultInstance() { return defaultInstance; } public IsCatalogJanitorEnabledResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private IsCatalogJanitorEnabledResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { bitField0_ |= 0x00000001; value_ = input.readBool(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsCatalogJanitorEnabledResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsCatalogJanitorEnabledResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.Builder.class); } public static com.google.protobuf.Parser<IsCatalogJanitorEnabledResponse> PARSER = new com.google.protobuf.AbstractParser<IsCatalogJanitorEnabledResponse>() { public IsCatalogJanitorEnabledResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new IsCatalogJanitorEnabledResponse(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<IsCatalogJanitorEnabledResponse> getParserForType() { return PARSER; } private int bitField0_; // required bool value = 1; public static final int VALUE_FIELD_NUMBER = 1; private boolean value_; /** * <code>required bool value = 1;</code> */ public boolean hasValue() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bool value = 1;</code> */ public boolean getValue() { return value_; } private void initFields() { value_ = false; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = 
memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasValue()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, value_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(1, value_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse) obj; boolean result = true; result = result && (hasValue() == other.hasValue()); if (hasValue()) { result = result && (getValue() == other.getValue()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasValue()) { hash = (37 * hash) + VALUE_FIELD_NUMBER; hash = (53 * hash) + hashBoolean(getValue()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException 
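/*
 * Illustrative only: value is a required field, so build() throws
 * UninitializedMessageException when it was never set, while
 * buildPartial() skips that check. The isInitialized() logic above
 * enforces the same rule on the message side.
 *
 *   IsCatalogJanitorEnabledResponse ok =
 *       IsCatalogJanitorEnabledResponse.newBuilder().setValue(true).build();
 *   // IsCatalogJanitorEnabledResponse.newBuilder().build() would throw here
 */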
{ return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code IsCatalogJanitorEnabledResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsCatalogJanitorEnabledResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsCatalogJanitorEnabledResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); value_ = false; bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsCatalogJanitorEnabledResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse 
getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.value_ = value_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance()) return this; if (other.hasValue()) { setValue(other.getValue()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasValue()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required bool value = 1; private boolean value_ ; /** * <code>required bool value = 1;</code> */ public boolean hasValue() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bool value = 1;</code> */ public boolean getValue() { return value_; } /** * <code>required bool value = 1;</code> */ public Builder setValue(boolean value) { bitField0_ |= 0x00000001; value_ = value; onChanged(); return this; } /** * <code>required bool value = 1;</code> */ public Builder clearValue() { bitField0_ = (bitField0_ & ~0x00000001); value_ = false; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:IsCatalogJanitorEnabledResponse) } static { defaultInstance = new IsCatalogJanitorEnabledResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:IsCatalogJanitorEnabledResponse) } public interface SnapshotRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .SnapshotDescription snapshot = 1; /** * <code>required .SnapshotDescription snapshot = 1;</code> */ boolean hasSnapshot(); /** * 
<code>required .SnapshotDescription snapshot = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot(); /** * <code>required .SnapshotDescription snapshot = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder(); } /** * Protobuf type {@code SnapshotRequest} */ public static final class SnapshotRequest extends com.google.protobuf.GeneratedMessage implements SnapshotRequestOrBuilder { // Use SnapshotRequest.newBuilder() to construct. private SnapshotRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private SnapshotRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final SnapshotRequest defaultInstance; public static SnapshotRequest getDefaultInstance() { return defaultInstance; } public SnapshotRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private SnapshotRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = snapshot_.toBuilder(); } snapshot_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(snapshot_); snapshot_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SnapshotRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SnapshotRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest.Builder.class); } public static com.google.protobuf.Parser<SnapshotRequest> PARSER = new com.google.protobuf.AbstractParser<SnapshotRequest>() { public SnapshotRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new SnapshotRequest(input, 
extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<SnapshotRequest> getParserForType() { return PARSER; } private int bitField0_; // required .SnapshotDescription snapshot = 1; public static final int SNAPSHOT_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_; /** * <code>required .SnapshotDescription snapshot = 1;</code> */ public boolean hasSnapshot() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .SnapshotDescription snapshot = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { return snapshot_; } /** * <code>required .SnapshotDescription snapshot = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { return snapshot_; } private void initFields() { snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasSnapshot()) { memoizedIsInitialized = 0; return false; } if (!getSnapshot().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, snapshot_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, snapshot_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest) obj; boolean result = true; result = result && (hasSnapshot() == other.hasSnapshot()); if (hasSnapshot()) { result = result && getSnapshot() .equals(other.getSnapshot()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasSnapshot()) { hash = (37 * hash) + SNAPSHOT_FIELD_NUMBER; hash = (53 * hash) + getSnapshot().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest 
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code SnapshotRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SnapshotRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SnapshotRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest.Builder.class); } // Construct using 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getSnapshotFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (snapshotBuilder_ == null) { snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); } else { snapshotBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SnapshotRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (snapshotBuilder_ == null) { result.snapshot_ = snapshot_; } else { result.snapshot_ = snapshotBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest.getDefaultInstance()) return this; if (other.hasSnapshot()) { mergeSnapshot(other.getSnapshot()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasSnapshot()) { return false; } if (!getSnapshot().isInitialized()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required .SnapshotDescription 
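/*
 * Added commentary, not protoc output: snapshot is a message-typed
 * singular field, so this Builder keeps both a plain snapshot_ instance
 * and a lazily created SingleFieldBuilder (snapshotBuilder_). Until
 * getSnapshotFieldBuilder() is first invoked, reads and writes go through
 * snapshot_ directly; after that, the field builder owns the value and
 * propagates onChanged() notifications back to this Builder.
 */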
snapshot = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_; /** * <code>required .SnapshotDescription snapshot = 1;</code> */ public boolean hasSnapshot() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .SnapshotDescription snapshot = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { if (snapshotBuilder_ == null) { return snapshot_; } else { return snapshotBuilder_.getMessage(); } } /** * <code>required .SnapshotDescription snapshot = 1;</code> */ public Builder setSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (value == null) { throw new NullPointerException(); } snapshot_ = value; onChanged(); } else { snapshotBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .SnapshotDescription snapshot = 1;</code> */ public Builder setSnapshot( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) { if (snapshotBuilder_ == null) { snapshot_ = builderForValue.build(); onChanged(); } else { snapshotBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>required .SnapshotDescription snapshot = 1;</code> */ public Builder mergeSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && snapshot_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) { snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial(); } else { snapshot_ = value; } onChanged(); } else { snapshotBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .SnapshotDescription snapshot = 1;</code> */ public Builder clearSnapshot() { if (snapshotBuilder_ == null) { snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); onChanged(); } else { snapshotBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>required .SnapshotDescription snapshot = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder getSnapshotBuilder() { bitField0_ |= 0x00000001; onChanged(); return getSnapshotFieldBuilder().getBuilder(); } /** * <code>required .SnapshotDescription snapshot = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { if (snapshotBuilder_ != null) { return snapshotBuilder_.getMessageOrBuilder(); } else { return snapshot_; } } /** * <code>required .SnapshotDescription snapshot = 1;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> getSnapshotFieldBuilder() { if (snapshotBuilder_ == null) { snapshotBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>( snapshot_, getParentForChildren(), isClean()); snapshot_ = null; } return snapshotBuilder_; } // @@protoc_insertion_point(builder_scope:SnapshotRequest) } static { defaultInstance = new SnapshotRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:SnapshotRequest) } public interface SnapshotResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { // required int64 expected_timeout = 1; /** * <code>required int64 expected_timeout = 1;</code> */ boolean hasExpectedTimeout(); /** * <code>required int64 expected_timeout = 1;</code> */ long getExpectedTimeout(); } /** * Protobuf type {@code SnapshotResponse} */ public static final class SnapshotResponse extends com.google.protobuf.GeneratedMessage implements SnapshotResponseOrBuilder { // Use SnapshotResponse.newBuilder() to construct. private SnapshotResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private SnapshotResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final SnapshotResponse defaultInstance; public static SnapshotResponse getDefaultInstance() { return defaultInstance; } public SnapshotResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private SnapshotResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { bitField0_ |= 0x00000001; expectedTimeout_ = input.readInt64(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SnapshotResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SnapshotResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse.Builder.class); } 
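/*
 * Illustrative only (assumes SnapshotDescription exposes a name setter,
 * per its declaration in HBase.proto): populating the required nested
 * message of SnapshotRequest, either with a ready-made value via
 * setSnapshot(...) or in place through getSnapshotBuilder().
 *
 *   SnapshotRequest req = SnapshotRequest.newBuilder()
 *       .setSnapshot(HBaseProtos.SnapshotDescription.newBuilder()
 *           .setName("mySnapshot")
 *           .build())
 *       .build();
 */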
public static com.google.protobuf.Parser<SnapshotResponse> PARSER = new com.google.protobuf.AbstractParser<SnapshotResponse>() { public SnapshotResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new SnapshotResponse(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<SnapshotResponse> getParserForType() { return PARSER; } private int bitField0_; // required int64 expected_timeout = 1; public static final int EXPECTED_TIMEOUT_FIELD_NUMBER = 1; private long expectedTimeout_; /** * <code>required int64 expected_timeout = 1;</code> */ public boolean hasExpectedTimeout() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required int64 expected_timeout = 1;</code> */ public long getExpectedTimeout() { return expectedTimeout_; } private void initFields() { expectedTimeout_ = 0L; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasExpectedTimeout()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeInt64(1, expectedTimeout_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeInt64Size(1, expectedTimeout_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse) obj; boolean result = true; result = result && (hasExpectedTimeout() == other.hasExpectedTimeout()); if (hasExpectedTimeout()) { result = result && (getExpectedTimeout() == other.getExpectedTimeout()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasExpectedTimeout()) { hash = (37 * hash) + EXPECTED_TIMEOUT_FIELD_NUMBER; hash = (53 * hash) + hashLong(getExpectedTimeout()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse parseFrom( com.google.protobuf.ByteString data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code SnapshotResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SnapshotResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SnapshotResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse.Builder.class); } // Construct using 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); expectedTimeout_ = 0L; bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SnapshotResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.expectedTimeout_ = expectedTimeout_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse.getDefaultInstance()) return this; if (other.hasExpectedTimeout()) { setExpectedTimeout(other.getExpectedTimeout()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasExpectedTimeout()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required int64 expected_timeout = 1; private long expectedTimeout_ ; /** * <code>required int64 expected_timeout = 1;</code> */ public boolean hasExpectedTimeout() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required int64 expected_timeout = 1;</code> */ public long 
getExpectedTimeout() { return expectedTimeout_; } /** * <code>required int64 expected_timeout = 1;</code> */ public Builder setExpectedTimeout(long value) { bitField0_ |= 0x00000001; expectedTimeout_ = value; onChanged(); return this; } /** * <code>required int64 expected_timeout = 1;</code> */ public Builder clearExpectedTimeout() { bitField0_ = (bitField0_ & ~0x00000001); expectedTimeout_ = 0L; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:SnapshotResponse) } static { defaultInstance = new SnapshotResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:SnapshotResponse) } public interface GetCompletedSnapshotsRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code GetCompletedSnapshotsRequest} */ public static final class GetCompletedSnapshotsRequest extends com.google.protobuf.GeneratedMessage implements GetCompletedSnapshotsRequestOrBuilder { // Use GetCompletedSnapshotsRequest.newBuilder() to construct. private GetCompletedSnapshotsRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private GetCompletedSnapshotsRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final GetCompletedSnapshotsRequest defaultInstance; public static GetCompletedSnapshotsRequest getDefaultInstance() { return defaultInstance; } public GetCompletedSnapshotsRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetCompletedSnapshotsRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetCompletedSnapshotsRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetCompletedSnapshotsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.Builder.class); } public static com.google.protobuf.Parser<GetCompletedSnapshotsRequest> PARSER = new com.google.protobuf.AbstractParser<GetCompletedSnapshotsRequest>() { public GetCompletedSnapshotsRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new GetCompletedSnapshotsRequest(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<GetCompletedSnapshotsRequest> getParserForType() { return PARSER; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code GetCompletedSnapshotsRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetCompletedSnapshotsRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetCompletedSnapshotsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetCompletedSnapshotsRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.getDefaultInstance(); } public 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } // @@protoc_insertion_point(builder_scope:GetCompletedSnapshotsRequest) } static { defaultInstance = new GetCompletedSnapshotsRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:GetCompletedSnapshotsRequest) } public interface GetCompletedSnapshotsResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { // repeated .SnapshotDescription snapshots = 1; /** * <code>repeated .SnapshotDescription snapshots = 1;</code> */ java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription> getSnapshotsList(); /** * <code>repeated .SnapshotDescription snapshots = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshots(int index); /** * <code>repeated .SnapshotDescription snapshots = 1;</code> */ int getSnapshotsCount(); /** * <code>repeated .SnapshotDescription snapshots = 1;</code> */ java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> getSnapshotsOrBuilderList(); /** * <code>repeated .SnapshotDescription snapshots = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotsOrBuilder( int index); } /** * Protobuf type {@code GetCompletedSnapshotsResponse} */ public static final class GetCompletedSnapshotsResponse extends com.google.protobuf.GeneratedMessage implements GetCompletedSnapshotsResponseOrBuilder { // Use GetCompletedSnapshotsResponse.newBuilder() to construct. 
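// Illustrative usage (editorial sketch, not generator output): building a
// response, round-tripping it through its wire form, and reading back the
// repeated `snapshots` field. setName()/getName() are assumed from the
// required `name` field of HBaseProtos.SnapshotDescription (same package);
// every other call is declared on this class or inherited from
// GeneratedMessage.
//
//   GetCompletedSnapshotsResponse resp = GetCompletedSnapshotsResponse.newBuilder()
//       .addSnapshots(HBaseProtos.SnapshotDescription.newBuilder()
//           .setName("demo-snapshot") // placeholder value
//           .build())
//       .build();
//   GetCompletedSnapshotsResponse parsed =
//       GetCompletedSnapshotsResponse.parseFrom(resp.toByteArray());
//   for (HBaseProtos.SnapshotDescription s : parsed.getSnapshotsList()) {
//     System.out.println(s.getName());
//   }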
private GetCompletedSnapshotsResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private GetCompletedSnapshotsResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final GetCompletedSnapshotsResponse defaultInstance; public static GetCompletedSnapshotsResponse getDefaultInstance() { return defaultInstance; } public GetCompletedSnapshotsResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetCompletedSnapshotsResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { snapshots_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription>(); mutable_bitField0_ |= 0x00000001; } snapshots_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry)); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { snapshots_ = java.util.Collections.unmodifiableList(snapshots_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetCompletedSnapshotsResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetCompletedSnapshotsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.Builder.class); } public static com.google.protobuf.Parser<GetCompletedSnapshotsResponse> PARSER = new com.google.protobuf.AbstractParser<GetCompletedSnapshotsResponse>() { public GetCompletedSnapshotsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new GetCompletedSnapshotsResponse(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<GetCompletedSnapshotsResponse> getParserForType() { return PARSER; } // repeated .SnapshotDescription snapshots = 1; public static final int SNAPSHOTS_FIELD_NUMBER = 1; private 
java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription> snapshots_; /** * <code>repeated .SnapshotDescription snapshots = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription> getSnapshotsList() { return snapshots_; } /** * <code>repeated .SnapshotDescription snapshots = 1;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> getSnapshotsOrBuilderList() { return snapshots_; } /** * <code>repeated .SnapshotDescription snapshots = 1;</code> */ public int getSnapshotsCount() { return snapshots_.size(); } /** * <code>repeated .SnapshotDescription snapshots = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshots(int index) { return snapshots_.get(index); } /** * <code>repeated .SnapshotDescription snapshots = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotsOrBuilder( int index) { return snapshots_.get(index); } private void initFields() { snapshots_ = java.util.Collections.emptyList(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; for (int i = 0; i < getSnapshotsCount(); i++) { if (!getSnapshots(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); for (int i = 0; i < snapshots_.size(); i++) { output.writeMessage(1, snapshots_.get(i)); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; for (int i = 0; i < snapshots_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, snapshots_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse) obj; boolean result = true; result = result && getSnapshotsList() .equals(other.getSnapshotsList()); result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (getSnapshotsCount() > 0) { hash = (37 * hash) + SNAPSHOTS_FIELD_NUMBER; hash = (53 * hash) + getSnapshotsList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse parseFrom( com.google.protobuf.ByteString data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code GetCompletedSnapshotsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetCompletedSnapshotsResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetCompletedSnapshotsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getSnapshotsFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (snapshotsBuilder_ == null) { snapshots_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { snapshotsBuilder_.clear(); } return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetCompletedSnapshotsResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse(this); int from_bitField0_ = bitField0_; if (snapshotsBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001)) { snapshots_ = java.util.Collections.unmodifiableList(snapshots_); bitField0_ = (bitField0_ & ~0x00000001); } result.snapshots_ = snapshots_; } else { result.snapshots_ = snapshotsBuilder_.build(); } onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.getDefaultInstance()) return this; if (snapshotsBuilder_ == null) { if (!other.snapshots_.isEmpty()) { if (snapshots_.isEmpty()) { snapshots_ = other.snapshots_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureSnapshotsIsMutable(); snapshots_.addAll(other.snapshots_); } onChanged(); } } else { if (!other.snapshots_.isEmpty()) { if (snapshotsBuilder_.isEmpty()) { snapshotsBuilder_.dispose(); snapshotsBuilder_ = null; snapshots_ = other.snapshots_; 
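// (descriptive note) At this point the empty field builder has been disposed
// so the merge can adopt `other`'s immutable snapshot list directly; the next
// statements clear the mutable-list bit and re-create the field builder
// lazily, eagerly only when alwaysUseFieldBuilders is set.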
bitField0_ = (bitField0_ & ~0x00000001); snapshotsBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? getSnapshotsFieldBuilder() : null; } else { snapshotsBuilder_.addAllMessages(other.snapshots_); } } } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { for (int i = 0; i < getSnapshotsCount(); i++) { if (!getSnapshots(i).isInitialized()) { return false; } } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // repeated .SnapshotDescription snapshots = 1; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription> snapshots_ = java.util.Collections.emptyList(); private void ensureSnapshotsIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { snapshots_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription>(snapshots_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotsBuilder_; /** * <code>repeated .SnapshotDescription snapshots = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription> getSnapshotsList() { if (snapshotsBuilder_ == null) { return java.util.Collections.unmodifiableList(snapshots_); } else { return snapshotsBuilder_.getMessageList(); } } /** * <code>repeated .SnapshotDescription snapshots = 1;</code> */ public int getSnapshotsCount() { if (snapshotsBuilder_ == null) { return snapshots_.size(); } else { return snapshotsBuilder_.getCount(); } } /** * <code>repeated .SnapshotDescription snapshots = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshots(int index) { if (snapshotsBuilder_ == null) { return snapshots_.get(index); } else { return snapshotsBuilder_.getMessage(index); } } /** * <code>repeated .SnapshotDescription snapshots = 1;</code> */ public Builder setSnapshots( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { if (snapshotsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureSnapshotsIsMutable(); snapshots_.set(index, value); onChanged(); } else { snapshotsBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .SnapshotDescription snapshots = 1;</code> */ public Builder setSnapshots( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) { if (snapshotsBuilder_ == null) { ensureSnapshotsIsMutable(); snapshots_.set(index, builderForValue.build()); onChanged(); } else { snapshotsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * 
<code>repeated .SnapshotDescription snapshots = 1;</code> */ public Builder addSnapshots(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { if (snapshotsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureSnapshotsIsMutable(); snapshots_.add(value); onChanged(); } else { snapshotsBuilder_.addMessage(value); } return this; } /** * <code>repeated .SnapshotDescription snapshots = 1;</code> */ public Builder addSnapshots( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { if (snapshotsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureSnapshotsIsMutable(); snapshots_.add(index, value); onChanged(); } else { snapshotsBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .SnapshotDescription snapshots = 1;</code> */ public Builder addSnapshots( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) { if (snapshotsBuilder_ == null) { ensureSnapshotsIsMutable(); snapshots_.add(builderForValue.build()); onChanged(); } else { snapshotsBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .SnapshotDescription snapshots = 1;</code> */ public Builder addSnapshots( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) { if (snapshotsBuilder_ == null) { ensureSnapshotsIsMutable(); snapshots_.add(index, builderForValue.build()); onChanged(); } else { snapshotsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .SnapshotDescription snapshots = 1;</code> */ public Builder addAllSnapshots( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription> values) { if (snapshotsBuilder_ == null) { ensureSnapshotsIsMutable(); super.addAll(values, snapshots_); onChanged(); } else { snapshotsBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .SnapshotDescription snapshots = 1;</code> */ public Builder clearSnapshots() { if (snapshotsBuilder_ == null) { snapshots_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { snapshotsBuilder_.clear(); } return this; } /** * <code>repeated .SnapshotDescription snapshots = 1;</code> */ public Builder removeSnapshots(int index) { if (snapshotsBuilder_ == null) { ensureSnapshotsIsMutable(); snapshots_.remove(index); onChanged(); } else { snapshotsBuilder_.remove(index); } return this; } /** * <code>repeated .SnapshotDescription snapshots = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder getSnapshotsBuilder( int index) { return getSnapshotsFieldBuilder().getBuilder(index); } /** * <code>repeated .SnapshotDescription snapshots = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotsOrBuilder( int index) { if (snapshotsBuilder_ == null) { return snapshots_.get(index); } else { return snapshotsBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .SnapshotDescription snapshots = 1;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> getSnapshotsOrBuilderList() { if (snapshotsBuilder_ != null) { return snapshotsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(snapshots_); } } /** * <code>repeated .SnapshotDescription snapshots = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder addSnapshotsBuilder() { return getSnapshotsFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()); } /** * <code>repeated .SnapshotDescription snapshots = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder addSnapshotsBuilder( int index) { return getSnapshotsFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()); } /** * <code>repeated .SnapshotDescription snapshots = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder> getSnapshotsBuilderList() { return getSnapshotsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> getSnapshotsFieldBuilder() { if (snapshotsBuilder_ == null) { snapshotsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>( snapshots_, ((bitField0_ & 0x00000001) == 0x00000001), getParentForChildren(), isClean()); snapshots_ = null; } return snapshotsBuilder_; } // @@protoc_insertion_point(builder_scope:GetCompletedSnapshotsResponse) } static { defaultInstance = new GetCompletedSnapshotsResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:GetCompletedSnapshotsResponse) } public interface DeleteSnapshotRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .SnapshotDescription snapshot = 1; /** * <code>required .SnapshotDescription snapshot = 1;</code> */ boolean hasSnapshot(); /** * <code>required .SnapshotDescription snapshot = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot(); /** * <code>required .SnapshotDescription snapshot = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder(); } /** * Protobuf type {@code DeleteSnapshotRequest} */ public static final class DeleteSnapshotRequest extends com.google.protobuf.GeneratedMessage implements DeleteSnapshotRequestOrBuilder { // Use DeleteSnapshotRequest.newBuilder() to construct. 
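// Illustrative usage (editorial sketch, not generator output): `snapshot` is
// the message's only field and is required, so build() throws an
// UninitializedMessageException when it is left unset. The snapshot name is a
// placeholder, and setName() is assumed from HBaseProtos.SnapshotDescription.
//
//   DeleteSnapshotRequest req = DeleteSnapshotRequest.newBuilder()
//       .setSnapshot(HBaseProtos.SnapshotDescription.newBuilder()
//           .setName("demo-snapshot")
//           .build())
//       .build();
//
// buildPartial() would skip the isInitialized() check that build() performs.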
private DeleteSnapshotRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private DeleteSnapshotRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final DeleteSnapshotRequest defaultInstance; public static DeleteSnapshotRequest getDefaultInstance() { return defaultInstance; } public DeleteSnapshotRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private DeleteSnapshotRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = snapshot_.toBuilder(); } snapshot_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(snapshot_); snapshot_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteSnapshotRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteSnapshotRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest.Builder.class); } public static com.google.protobuf.Parser<DeleteSnapshotRequest> PARSER = new com.google.protobuf.AbstractParser<DeleteSnapshotRequest>() { public DeleteSnapshotRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new DeleteSnapshotRequest(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<DeleteSnapshotRequest> getParserForType() { return PARSER; } private int bitField0_; // required .SnapshotDescription snapshot = 1; public static final int SNAPSHOT_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_; /** * <code>required .SnapshotDescription snapshot = 1;</code> */ public boolean hasSnapshot() { return 
((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .SnapshotDescription snapshot = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { return snapshot_; } /** * <code>required .SnapshotDescription snapshot = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { return snapshot_; } private void initFields() { snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasSnapshot()) { memoizedIsInitialized = 0; return false; } if (!getSnapshot().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, snapshot_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, snapshot_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest) obj; boolean result = true; result = result && (hasSnapshot() == other.hasSnapshot()); if (hasSnapshot()) { result = result && getSnapshot() .equals(other.getSnapshot()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasSnapshot()) { hash = (37 * hash) + SNAPSHOT_FIELD_NUMBER; hash = (53 * hash) + getSnapshot().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code DeleteSnapshotRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteSnapshotRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteSnapshotRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if 
(com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getSnapshotFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (snapshotBuilder_ == null) { snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); } else { snapshotBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteSnapshotRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (snapshotBuilder_ == null) { result.snapshot_ = snapshot_; } else { result.snapshot_ = snapshotBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest.getDefaultInstance()) return this; if (other.hasSnapshot()) { mergeSnapshot(other.getSnapshot()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasSnapshot()) { return false; } if (!getSnapshot().isInitialized()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required .SnapshotDescription snapshot = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); private 
com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_; /** * <code>required .SnapshotDescription snapshot = 1;</code> */ public boolean hasSnapshot() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .SnapshotDescription snapshot = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { if (snapshotBuilder_ == null) { return snapshot_; } else { return snapshotBuilder_.getMessage(); } } /** * <code>required .SnapshotDescription snapshot = 1;</code> */ public Builder setSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (value == null) { throw new NullPointerException(); } snapshot_ = value; onChanged(); } else { snapshotBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .SnapshotDescription snapshot = 1;</code> */ public Builder setSnapshot( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) { if (snapshotBuilder_ == null) { snapshot_ = builderForValue.build(); onChanged(); } else { snapshotBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>required .SnapshotDescription snapshot = 1;</code> */ public Builder mergeSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && snapshot_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) { snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial(); } else { snapshot_ = value; } onChanged(); } else { snapshotBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .SnapshotDescription snapshot = 1;</code> */ public Builder clearSnapshot() { if (snapshotBuilder_ == null) { snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); onChanged(); } else { snapshotBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>required .SnapshotDescription snapshot = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder getSnapshotBuilder() { bitField0_ |= 0x00000001; onChanged(); return getSnapshotFieldBuilder().getBuilder(); } /** * <code>required .SnapshotDescription snapshot = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { if (snapshotBuilder_ != null) { return snapshotBuilder_.getMessageOrBuilder(); } else { return snapshot_; } } /** * <code>required .SnapshotDescription snapshot = 1;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> getSnapshotFieldBuilder() { if (snapshotBuilder_ == null) { snapshotBuilder_ = new com.google.protobuf.SingleFieldBuilder< 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>( snapshot_, getParentForChildren(), isClean()); snapshot_ = null; } return snapshotBuilder_; } // @@protoc_insertion_point(builder_scope:DeleteSnapshotRequest) } static { defaultInstance = new DeleteSnapshotRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:DeleteSnapshotRequest) } public interface DeleteSnapshotResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code DeleteSnapshotResponse} */ public static final class DeleteSnapshotResponse extends com.google.protobuf.GeneratedMessage implements DeleteSnapshotResponseOrBuilder { // Use DeleteSnapshotResponse.newBuilder() to construct. private DeleteSnapshotResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private DeleteSnapshotResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final DeleteSnapshotResponse defaultInstance; public static DeleteSnapshotResponse getDefaultInstance() { return defaultInstance; } public DeleteSnapshotResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private DeleteSnapshotResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteSnapshotResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteSnapshotResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse.Builder.class); } public static com.google.protobuf.Parser<DeleteSnapshotResponse> PARSER = new com.google.protobuf.AbstractParser<DeleteSnapshotResponse>() { public DeleteSnapshotResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new DeleteSnapshotResponse(input, extensionRegistry); } }; 
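/*
 * A minimal usage sketch for the DeleteSnapshotRequest message completed above,
 * assuming HBaseProtos.SnapshotDescription exposes setName(String) for its
 * required name field (the usual shape of that message); illustrative only,
 * not canonical client code.
 *
 *   HBaseProtos.SnapshotDescription desc = HBaseProtos.SnapshotDescription.newBuilder()
 *       .setName("my_snapshot")              // assumed required name field
 *       .build();
 *   MasterProtos.DeleteSnapshotRequest req = MasterProtos.DeleteSnapshotRequest.newBuilder()
 *       .setSnapshot(desc)                   // required .SnapshotDescription snapshot = 1
 *       .build();                            // build() verifies required fields are set
 *   byte[] wire = req.toByteArray();         // serialize for the master RPC layer
 *   MasterProtos.DeleteSnapshotRequest parsed =
 *       MasterProtos.DeleteSnapshotRequest.parseFrom(wire);
 */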
@java.lang.Override public com.google.protobuf.Parser<DeleteSnapshotResponse> getParserForType() { return PARSER; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static 
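/*
 * DeleteSnapshotResponse carries no fields; it only acknowledges the RPC. The
 * parseFrom/parseDelimitedFrom overloads nearby differ only in framing. A sketch
 * of the length-prefixed variant (illustrative; writeDelimitedTo comes from the
 * protobuf MessageLite API and throws java.io.IOException):
 *
 *   java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
 *   MasterProtos.DeleteSnapshotResponse resp =
 *       MasterProtos.DeleteSnapshotResponse.getDefaultInstance();
 *   resp.writeDelimitedTo(out);              // varint length prefix, then the (empty) body
 *   MasterProtos.DeleteSnapshotResponse back =
 *       MasterProtos.DeleteSnapshotResponse.parseDelimitedFrom(
 *           new java.io.ByteArrayInputStream(out.toByteArray()));
 */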
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code DeleteSnapshotResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteSnapshotResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteSnapshotResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteSnapshotResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } // @@protoc_insertion_point(builder_scope:DeleteSnapshotResponse) } static { defaultInstance = new DeleteSnapshotResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:DeleteSnapshotResponse) } public interface RestoreSnapshotRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .SnapshotDescription snapshot = 1; /** * <code>required .SnapshotDescription snapshot = 1;</code> */ boolean hasSnapshot(); /** * <code>required .SnapshotDescription snapshot = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot(); /** * <code>required .SnapshotDescription snapshot = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder(); } /** * Protobuf type {@code RestoreSnapshotRequest} */ public static final class RestoreSnapshotRequest extends com.google.protobuf.GeneratedMessage implements RestoreSnapshotRequestOrBuilder { // Use RestoreSnapshotRequest.newBuilder() to construct. 
private RestoreSnapshotRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private RestoreSnapshotRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final RestoreSnapshotRequest defaultInstance; public static RestoreSnapshotRequest getDefaultInstance() { return defaultInstance; } public RestoreSnapshotRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private RestoreSnapshotRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = snapshot_.toBuilder(); } snapshot_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(snapshot_); snapshot_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_RestoreSnapshotRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_RestoreSnapshotRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest.Builder.class); } public static com.google.protobuf.Parser<RestoreSnapshotRequest> PARSER = new com.google.protobuf.AbstractParser<RestoreSnapshotRequest>() { public RestoreSnapshotRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new RestoreSnapshotRequest(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<RestoreSnapshotRequest> getParserForType() { return PARSER; } private int bitField0_; // required .SnapshotDescription snapshot = 1; public static final int SNAPSHOT_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_; /** * <code>required .SnapshotDescription snapshot = 1;</code> */ public boolean 
hasSnapshot() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .SnapshotDescription snapshot = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { return snapshot_; } /** * <code>required .SnapshotDescription snapshot = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { return snapshot_; } private void initFields() { snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasSnapshot()) { memoizedIsInitialized = 0; return false; } if (!getSnapshot().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, snapshot_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, snapshot_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest) obj; boolean result = true; result = result && (hasSnapshot() == other.hasSnapshot()); if (hasSnapshot()) { result = result && getSnapshot() .equals(other.getSnapshot()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasSnapshot()) { hash = (37 * hash) + SNAPSHOT_FIELD_NUMBER; hash = (53 * hash) + getSnapshot().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { 
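/*
 * RestoreSnapshotRequest declares snapshot as a required field, so isInitialized()
 * above gates build(). A sketch of the two construction paths (illustrative):
 *
 *   MasterProtos.RestoreSnapshotRequest.Builder b =
 *       MasterProtos.RestoreSnapshotRequest.newBuilder();
 *   boolean ok = b.isInitialized();          // false: required snapshot not set
 *   MasterProtos.RestoreSnapshotRequest partial = b.buildPartial();
 *                                            // permitted, but partial.isInitialized() is false
 *   // b.build();                            // would throw UninitializedMessageException
 */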
return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code RestoreSnapshotRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_RestoreSnapshotRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_RestoreSnapshotRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if 
(com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getSnapshotFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (snapshotBuilder_ == null) { snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); } else { snapshotBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_RestoreSnapshotRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (snapshotBuilder_ == null) { result.snapshot_ = snapshot_; } else { result.snapshot_ = snapshotBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest.getDefaultInstance()) return this; if (other.hasSnapshot()) { mergeSnapshot(other.getSnapshot()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasSnapshot()) { return false; } if (!getSnapshot().isInitialized()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required .SnapshotDescription snapshot = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); private 
com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_; /** * <code>required .SnapshotDescription snapshot = 1;</code> */ public boolean hasSnapshot() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .SnapshotDescription snapshot = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { if (snapshotBuilder_ == null) { return snapshot_; } else { return snapshotBuilder_.getMessage(); } } /** * <code>required .SnapshotDescription snapshot = 1;</code> */ public Builder setSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (value == null) { throw new NullPointerException(); } snapshot_ = value; onChanged(); } else { snapshotBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .SnapshotDescription snapshot = 1;</code> */ public Builder setSnapshot( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) { if (snapshotBuilder_ == null) { snapshot_ = builderForValue.build(); onChanged(); } else { snapshotBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>required .SnapshotDescription snapshot = 1;</code> */ public Builder mergeSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && snapshot_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) { snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial(); } else { snapshot_ = value; } onChanged(); } else { snapshotBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .SnapshotDescription snapshot = 1;</code> */ public Builder clearSnapshot() { if (snapshotBuilder_ == null) { snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); onChanged(); } else { snapshotBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>required .SnapshotDescription snapshot = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder getSnapshotBuilder() { bitField0_ |= 0x00000001; onChanged(); return getSnapshotFieldBuilder().getBuilder(); } /** * <code>required .SnapshotDescription snapshot = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { if (snapshotBuilder_ != null) { return snapshotBuilder_.getMessageOrBuilder(); } else { return snapshot_; } } /** * <code>required .SnapshotDescription snapshot = 1;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> getSnapshotFieldBuilder() { if (snapshotBuilder_ == null) { snapshotBuilder_ = new com.google.protobuf.SingleFieldBuilder< 
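/*
 * mergeSnapshot(...) above merges field-by-field rather than overwriting: when a
 * snapshot is already set, the incoming value is folded in via
 * SnapshotDescription.newBuilder(existing).mergeFrom(value). Sketch, assuming
 * SnapshotDescription also has an optional string table field as in HBaseProtos:
 *
 *   MasterProtos.RestoreSnapshotRequest.Builder b =
 *       MasterProtos.RestoreSnapshotRequest.newBuilder();
 *   b.setSnapshot(HBaseProtos.SnapshotDescription.newBuilder()
 *       .setName("snap").build());
 *   b.mergeSnapshot(HBaseProtos.SnapshotDescription.newBuilder()
 *       .setTable("t1").buildPartial());     // buildPartial: name intentionally unset here
 *   // b.getSnapshot() now carries both name == "snap" and table == "t1"
 */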
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>( snapshot_, getParentForChildren(), isClean()); snapshot_ = null; } return snapshotBuilder_; } // @@protoc_insertion_point(builder_scope:RestoreSnapshotRequest) } static { defaultInstance = new RestoreSnapshotRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:RestoreSnapshotRequest) } public interface RestoreSnapshotResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code RestoreSnapshotResponse} */ public static final class RestoreSnapshotResponse extends com.google.protobuf.GeneratedMessage implements RestoreSnapshotResponseOrBuilder { // Use RestoreSnapshotResponse.newBuilder() to construct. private RestoreSnapshotResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private RestoreSnapshotResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final RestoreSnapshotResponse defaultInstance; public static RestoreSnapshotResponse getDefaultInstance() { return defaultInstance; } public RestoreSnapshotResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private RestoreSnapshotResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_RestoreSnapshotResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_RestoreSnapshotResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse.Builder.class); } public static com.google.protobuf.Parser<RestoreSnapshotResponse> PARSER = new com.google.protobuf.AbstractParser<RestoreSnapshotResponse>() { public RestoreSnapshotResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new RestoreSnapshotResponse(input, 
extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<RestoreSnapshotResponse> getParserForType() { return PARSER; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code RestoreSnapshotResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_RestoreSnapshotResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_RestoreSnapshotResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_RestoreSnapshotResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } // @@protoc_insertion_point(builder_scope:RestoreSnapshotResponse) } static { defaultInstance = new RestoreSnapshotResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:RestoreSnapshotResponse) } public interface IsSnapshotDoneRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // optional .SnapshotDescription snapshot = 1; /** * <code>optional .SnapshotDescription snapshot = 1;</code> */ boolean hasSnapshot(); /** * <code>optional .SnapshotDescription snapshot = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot(); /** * <code>optional .SnapshotDescription snapshot = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder(); } /** * Protobuf type {@code IsSnapshotDoneRequest} * * <pre> * if you don't send the snapshot, then you will get it back * in the response (if the snapshot is done) so you can check the snapshot * </pre> */ public static final class IsSnapshotDoneRequest extends com.google.protobuf.GeneratedMessage implements IsSnapshotDoneRequestOrBuilder { // Use IsSnapshotDoneRequest.newBuilder() to construct. 
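/*
 * Per the generated doc comment above, IsSnapshotDoneRequest makes its snapshot
 * optional: omit it and the master echoes the finished snapshot back in
 * IsSnapshotDoneResponse (defined further below). A self-contained sketch of the
 * request/response shapes, with the response constructed locally for illustration
 * rather than received over RPC:
 *
 *   MasterProtos.IsSnapshotDoneRequest req =
 *       MasterProtos.IsSnapshotDoneRequest.newBuilder().build();  // snapshot omitted
 *   MasterProtos.IsSnapshotDoneResponse resp =
 *       MasterProtos.IsSnapshotDoneResponse.newBuilder()
 *           .setDone(true)                   // optional bool done = 1 [default = false]
 *           .setSnapshot(HBaseProtos.SnapshotDescription.newBuilder()
 *               .setName("snap").build())    // optional .SnapshotDescription snapshot = 2
 *           .build();
 *   if (resp.getDone()) {
 *     HBaseProtos.SnapshotDescription finished = resp.getSnapshot();
 *   }
 */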
private IsSnapshotDoneRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private IsSnapshotDoneRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final IsSnapshotDoneRequest defaultInstance; public static IsSnapshotDoneRequest getDefaultInstance() { return defaultInstance; } public IsSnapshotDoneRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private IsSnapshotDoneRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = snapshot_.toBuilder(); } snapshot_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(snapshot_); snapshot_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsSnapshotDoneRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsSnapshotDoneRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest.Builder.class); } public static com.google.protobuf.Parser<IsSnapshotDoneRequest> PARSER = new com.google.protobuf.AbstractParser<IsSnapshotDoneRequest>() { public IsSnapshotDoneRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new IsSnapshotDoneRequest(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<IsSnapshotDoneRequest> getParserForType() { return PARSER; } private int bitField0_; // optional .SnapshotDescription snapshot = 1; public static final int SNAPSHOT_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_; /** * <code>optional .SnapshotDescription snapshot = 1;</code> */ public boolean hasSnapshot() { return 
((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional .SnapshotDescription snapshot = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { return snapshot_; } /** * <code>optional .SnapshotDescription snapshot = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { return snapshot_; } private void initFields() { snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (hasSnapshot()) { if (!getSnapshot().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, snapshot_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, snapshot_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest) obj; boolean result = true; result = result && (hasSnapshot() == other.hasSnapshot()); if (hasSnapshot()) { result = result && getSnapshot() .equals(other.getSnapshot()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasSnapshot()) { hash = (37 * hash) + SNAPSHOT_FIELD_NUMBER; hash = (53 * hash) + getSnapshot().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code IsSnapshotDoneRequest} * * <pre> * if you don't send the snapshot, then you will get it back * in the response (if the snapshot is done) so you can check the snapshot * </pre> */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsSnapshotDoneRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsSnapshotDoneRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { 
super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getSnapshotFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (snapshotBuilder_ == null) { snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); } else { snapshotBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsSnapshotDoneRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (snapshotBuilder_ == null) { result.snapshot_ = snapshot_; } else { result.snapshot_ = snapshotBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest.getDefaultInstance()) return this; if (other.hasSnapshot()) { mergeSnapshot(other.getSnapshot()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (hasSnapshot()) { if (!getSnapshot().isInitialized()) { return false; } } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // optional .SnapshotDescription snapshot = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = 
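/*
 * Optional-field semantics on this builder, in contrast to the required snapshot
 * on DeleteSnapshotRequest. Sketch (setName assumed as before):
 *
 *   MasterProtos.IsSnapshotDoneRequest.Builder b =
 *       MasterProtos.IsSnapshotDoneRequest.newBuilder();
 *   b.hasSnapshot();                         // false until explicitly set
 *   b.build();                               // fine: no required fields on this message
 *   b.setSnapshot(HBaseProtos.SnapshotDescription.newBuilder()
 *       .setName("snap").build());
 *   b.hasSnapshot();                         // true
 *   b.clearSnapshot();                       // back to the default instance; hasSnapshot() false
 */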
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_; /** * <code>optional .SnapshotDescription snapshot = 1;</code> */ public boolean hasSnapshot() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional .SnapshotDescription snapshot = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { if (snapshotBuilder_ == null) { return snapshot_; } else { return snapshotBuilder_.getMessage(); } } /** * <code>optional .SnapshotDescription snapshot = 1;</code> */ public Builder setSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (value == null) { throw new NullPointerException(); } snapshot_ = value; onChanged(); } else { snapshotBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>optional .SnapshotDescription snapshot = 1;</code> */ public Builder setSnapshot( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) { if (snapshotBuilder_ == null) { snapshot_ = builderForValue.build(); onChanged(); } else { snapshotBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>optional .SnapshotDescription snapshot = 1;</code> */ public Builder mergeSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && snapshot_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) { snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial(); } else { snapshot_ = value; } onChanged(); } else { snapshotBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>optional .SnapshotDescription snapshot = 1;</code> */ public Builder clearSnapshot() { if (snapshotBuilder_ == null) { snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); onChanged(); } else { snapshotBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>optional .SnapshotDescription snapshot = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder getSnapshotBuilder() { bitField0_ |= 0x00000001; onChanged(); return getSnapshotFieldBuilder().getBuilder(); } /** * <code>optional .SnapshotDescription snapshot = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { if (snapshotBuilder_ != null) { return snapshotBuilder_.getMessageOrBuilder(); } else { return snapshot_; } } /** * <code>optional .SnapshotDescription snapshot = 1;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> getSnapshotFieldBuilder() { if (snapshotBuilder_ == 
null) { snapshotBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>( snapshot_, getParentForChildren(), isClean()); snapshot_ = null; } return snapshotBuilder_; } // @@protoc_insertion_point(builder_scope:IsSnapshotDoneRequest) } static { defaultInstance = new IsSnapshotDoneRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:IsSnapshotDoneRequest) } public interface IsSnapshotDoneResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { // optional bool done = 1 [default = false]; /** * <code>optional bool done = 1 [default = false];</code> */ boolean hasDone(); /** * <code>optional bool done = 1 [default = false];</code> */ boolean getDone(); // optional .SnapshotDescription snapshot = 2; /** * <code>optional .SnapshotDescription snapshot = 2;</code> */ boolean hasSnapshot(); /** * <code>optional .SnapshotDescription snapshot = 2;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot(); /** * <code>optional .SnapshotDescription snapshot = 2;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder(); } /** * Protobuf type {@code IsSnapshotDoneResponse} */ public static final class IsSnapshotDoneResponse extends com.google.protobuf.GeneratedMessage implements IsSnapshotDoneResponseOrBuilder { // Use IsSnapshotDoneResponse.newBuilder() to construct. private IsSnapshotDoneResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private IsSnapshotDoneResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final IsSnapshotDoneResponse defaultInstance; public static IsSnapshotDoneResponse getDefaultInstance() { return defaultInstance; } public IsSnapshotDoneResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private IsSnapshotDoneResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { bitField0_ |= 0x00000001; done_ = input.readBool(); break; } case 18: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = null; if (((bitField0_ & 0x00000002) == 0x00000002)) { subBuilder = snapshot_.toBuilder(); } snapshot_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(snapshot_); snapshot_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw 
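/*
 * Usage sketch, not part of the protoc output; "wireBytes" is a placeholder
 * for a serialized response. IsSnapshotDoneResponse carries an optional bool
 * "done" (default false) and may echo the SnapshotDescription, so callers can
 * poll until the snapshot completes.
 *
 *   MasterProtos.IsSnapshotDoneResponse resp =
 *       MasterProtos.IsSnapshotDoneResponse.parseFrom(wireBytes);
 *   boolean finished = resp.getDone();         // false when unset, per the default
 *   if (finished && resp.hasSnapshot()) {
 *     System.out.println("done: " + resp.getSnapshot().getName());
 *   }
 */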
e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsSnapshotDoneResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsSnapshotDoneResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.Builder.class); } public static com.google.protobuf.Parser<IsSnapshotDoneResponse> PARSER = new com.google.protobuf.AbstractParser<IsSnapshotDoneResponse>() { public IsSnapshotDoneResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new IsSnapshotDoneResponse(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<IsSnapshotDoneResponse> getParserForType() { return PARSER; } private int bitField0_; // optional bool done = 1 [default = false]; public static final int DONE_FIELD_NUMBER = 1; private boolean done_; /** * <code>optional bool done = 1 [default = false];</code> */ public boolean hasDone() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional bool done = 1 [default = false];</code> */ public boolean getDone() { return done_; } // optional .SnapshotDescription snapshot = 2; public static final int SNAPSHOT_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_; /** * <code>optional .SnapshotDescription snapshot = 2;</code> */ public boolean hasSnapshot() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional .SnapshotDescription snapshot = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { return snapshot_; } /** * <code>optional .SnapshotDescription snapshot = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { return snapshot_; } private void initFields() { done_ = false; snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (hasSnapshot()) { if (!getSnapshot().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, done_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeMessage(2, snapshot_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += 
com.google.protobuf.CodedOutputStream .computeBoolSize(1, done_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, snapshot_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse) obj; boolean result = true; result = result && (hasDone() == other.hasDone()); if (hasDone()) { result = result && (getDone() == other.getDone()); } result = result && (hasSnapshot() == other.hasSnapshot()); if (hasSnapshot()) { result = result && getSnapshot() .equals(other.getSnapshot()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasDone()) { hash = (37 * hash) + DONE_FIELD_NUMBER; hash = (53 * hash) + hashBoolean(getDone()); } if (hasSnapshot()) { hash = (37 * hash) + SNAPSHOT_FIELD_NUMBER; hash = (53 * hash) + getSnapshot().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static 
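/*
 * Usage sketch, not part of the protoc output: parseDelimitedFrom() reads the
 * varint length prefix written by writeDelimitedTo(), letting several messages
 * share one stream; in protobuf 2.5 it returns null at end of stream. "resp"
 * is a placeholder for a response built elsewhere.
 *
 *   java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
 *   resp.writeDelimitedTo(out);
 *   resp.writeDelimitedTo(out);
 *   java.io.InputStream in =
 *       new java.io.ByteArrayInputStream(out.toByteArray());
 *   MasterProtos.IsSnapshotDoneResponse first =
 *       MasterProtos.IsSnapshotDoneResponse.parseDelimitedFrom(in);
 *   MasterProtos.IsSnapshotDoneResponse second =
 *       MasterProtos.IsSnapshotDoneResponse.parseDelimitedFrom(in);
 *   assert MasterProtos.IsSnapshotDoneResponse.parseDelimitedFrom(in) == null;
 */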
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code IsSnapshotDoneResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsSnapshotDoneResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsSnapshotDoneResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getSnapshotFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); done_ = false; bitField0_ = (bitField0_ & ~0x00000001); if (snapshotBuilder_ == null) { snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); } else { snapshotBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsSnapshotDoneResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.getDefaultInstance(); } public 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.done_ = done_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } if (snapshotBuilder_ == null) { result.snapshot_ = snapshot_; } else { result.snapshot_ = snapshotBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.getDefaultInstance()) return this; if (other.hasDone()) { setDone(other.getDone()); } if (other.hasSnapshot()) { mergeSnapshot(other.getSnapshot()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (hasSnapshot()) { if (!getSnapshot().isInitialized()) { return false; } } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // optional bool done = 1 [default = false]; private boolean done_ ; /** * <code>optional bool done = 1 [default = false];</code> */ public boolean hasDone() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional bool done = 1 [default = false];</code> */ public boolean getDone() { return done_; } /** * <code>optional bool done = 1 [default = false];</code> */ public Builder setDone(boolean value) { bitField0_ |= 0x00000001; done_ = value; onChanged(); return this; } /** * <code>optional bool done = 1 [default = false];</code> */ public Builder clearDone() { bitField0_ = (bitField0_ & ~0x00000001); done_ = false; onChanged(); return this; } // optional .SnapshotDescription snapshot = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_; /** * <code>optional .SnapshotDescription snapshot = 2;</code> */ public boolean hasSnapshot() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional .SnapshotDescription snapshot = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { if (snapshotBuilder_ == null) { return snapshot_; } else { return snapshotBuilder_.getMessage(); } } /** * <code>optional .SnapshotDescription snapshot = 2;</code> */ public Builder setSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (value == null) { throw new NullPointerException(); } snapshot_ = value; onChanged(); } else { snapshotBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .SnapshotDescription snapshot = 2;</code> */ public Builder setSnapshot( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) { if (snapshotBuilder_ == null) { snapshot_ = builderForValue.build(); onChanged(); } else { snapshotBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .SnapshotDescription snapshot = 2;</code> */ public Builder mergeSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && snapshot_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) { snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial(); } else { snapshot_ = value; } onChanged(); } else { snapshotBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .SnapshotDescription snapshot = 2;</code> */ public Builder clearSnapshot() { if (snapshotBuilder_ == null) { snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); onChanged(); } else { snapshotBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } /** * <code>optional .SnapshotDescription snapshot = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder getSnapshotBuilder() { bitField0_ |= 0x00000002; onChanged(); return getSnapshotFieldBuilder().getBuilder(); } /** * <code>optional .SnapshotDescription snapshot = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { if (snapshotBuilder_ != null) { return snapshotBuilder_.getMessageOrBuilder(); } else { return snapshot_; } } /** * <code>optional .SnapshotDescription snapshot = 2;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> getSnapshotFieldBuilder() { if (snapshotBuilder_ == null) { snapshotBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>( snapshot_, getParentForChildren(), isClean()); snapshot_ = null; } return snapshotBuilder_; } // @@protoc_insertion_point(builder_scope:IsSnapshotDoneResponse) } static { defaultInstance = new IsSnapshotDoneResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:IsSnapshotDoneResponse) } public interface IsRestoreSnapshotDoneRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // optional .SnapshotDescription snapshot = 1; /** * <code>optional .SnapshotDescription snapshot = 1;</code> */ boolean hasSnapshot(); /** * <code>optional .SnapshotDescription snapshot = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot(); /** * <code>optional .SnapshotDescription snapshot = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder(); } /** * Protobuf type {@code IsRestoreSnapshotDoneRequest} */ public static final class IsRestoreSnapshotDoneRequest extends com.google.protobuf.GeneratedMessage implements IsRestoreSnapshotDoneRequestOrBuilder { // Use IsRestoreSnapshotDoneRequest.newBuilder() to construct. private IsRestoreSnapshotDoneRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private IsRestoreSnapshotDoneRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final IsRestoreSnapshotDoneRequest defaultInstance; public static IsRestoreSnapshotDoneRequest getDefaultInstance() { return defaultInstance; } public IsRestoreSnapshotDoneRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private IsRestoreSnapshotDoneRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = snapshot_.toBuilder(); } snapshot_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(snapshot_); snapshot_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
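/*
 * Usage sketch, not part of the protoc output: IsRestoreSnapshotDoneRequest
 * mirrors the snapshot-done request but targets restore operations; a client
 * would typically reuse the SnapshotDescription it passed to the restore call.
 * "desc" is a placeholder built as in the earlier sketches.
 *
 *   MasterProtos.IsRestoreSnapshotDoneRequest restoreCheck =
 *       MasterProtos.IsRestoreSnapshotDoneRequest.newBuilder()
 *           .setSnapshot(desc)
 *           .build();
 */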
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsRestoreSnapshotDoneRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsRestoreSnapshotDoneRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest.Builder.class); } public static com.google.protobuf.Parser<IsRestoreSnapshotDoneRequest> PARSER = new com.google.protobuf.AbstractParser<IsRestoreSnapshotDoneRequest>() { public IsRestoreSnapshotDoneRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new IsRestoreSnapshotDoneRequest(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<IsRestoreSnapshotDoneRequest> getParserForType() { return PARSER; } private int bitField0_; // optional .SnapshotDescription snapshot = 1; public static final int SNAPSHOT_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_; /** * <code>optional .SnapshotDescription snapshot = 1;</code> */ public boolean hasSnapshot() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional .SnapshotDescription snapshot = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { return snapshot_; } /** * <code>optional .SnapshotDescription snapshot = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { return snapshot_; } private void initFields() { snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (hasSnapshot()) { if (!getSnapshot().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, snapshot_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, snapshot_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest other = 
(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest) obj; boolean result = true; result = result && (hasSnapshot() == other.hasSnapshot()); if (hasSnapshot()) { result = result && getSnapshot() .equals(other.getSnapshot()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasSnapshot()) { hash = (37 * hash) + SNAPSHOT_FIELD_NUMBER; hash = (53 * hash) + getSnapshot().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code IsRestoreSnapshotDoneRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsRestoreSnapshotDoneRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsRestoreSnapshotDoneRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getSnapshotFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (snapshotBuilder_ == null) { snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); } else { snapshotBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsRestoreSnapshotDoneRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (snapshotBuilder_ == null) { result.snapshot_ = snapshot_; } else { result.snapshot_ = snapshotBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder 
mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest.getDefaultInstance()) return this; if (other.hasSnapshot()) { mergeSnapshot(other.getSnapshot()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (hasSnapshot()) { if (!getSnapshot().isInitialized()) { return false; } } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // optional .SnapshotDescription snapshot = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_; /** * <code>optional .SnapshotDescription snapshot = 1;</code> */ public boolean hasSnapshot() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional .SnapshotDescription snapshot = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { if (snapshotBuilder_ == null) { return snapshot_; } else { return snapshotBuilder_.getMessage(); } } /** * <code>optional .SnapshotDescription snapshot = 1;</code> */ public Builder setSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (value == null) { throw new NullPointerException(); } snapshot_ = value; onChanged(); } else { snapshotBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>optional .SnapshotDescription snapshot = 1;</code> */ public Builder setSnapshot( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) { if (snapshotBuilder_ == null) { snapshot_ = builderForValue.build(); onChanged(); } else { snapshotBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>optional .SnapshotDescription snapshot = 1;</code> */ public Builder mergeSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && snapshot_ != 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) { snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial(); } else { snapshot_ = value; } onChanged(); } else { snapshotBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>optional .SnapshotDescription snapshot = 1;</code> */ public Builder clearSnapshot() { if (snapshotBuilder_ == null) { snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); onChanged(); } else { snapshotBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>optional .SnapshotDescription snapshot = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder getSnapshotBuilder() { bitField0_ |= 0x00000001; onChanged(); return getSnapshotFieldBuilder().getBuilder(); } /** * <code>optional .SnapshotDescription snapshot = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { if (snapshotBuilder_ != null) { return snapshotBuilder_.getMessageOrBuilder(); } else { return snapshot_; } } /** * <code>optional .SnapshotDescription snapshot = 1;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> getSnapshotFieldBuilder() { if (snapshotBuilder_ == null) { snapshotBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>( snapshot_, getParentForChildren(), isClean()); snapshot_ = null; } return snapshotBuilder_; } // @@protoc_insertion_point(builder_scope:IsRestoreSnapshotDoneRequest) } static { defaultInstance = new IsRestoreSnapshotDoneRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:IsRestoreSnapshotDoneRequest) } public interface IsRestoreSnapshotDoneResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { // optional bool done = 1 [default = false]; /** * <code>optional bool done = 1 [default = false];</code> */ boolean hasDone(); /** * <code>optional bool done = 1 [default = false];</code> */ boolean getDone(); } /** * Protobuf type {@code IsRestoreSnapshotDoneResponse} */ public static final class IsRestoreSnapshotDoneResponse extends com.google.protobuf.GeneratedMessage implements IsRestoreSnapshotDoneResponseOrBuilder { // Use IsRestoreSnapshotDoneResponse.newBuilder() to construct. 
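/*
 * Usage sketch, not part of the protoc output: because "done" is optional with
 * [default = false], an empty IsRestoreSnapshotDoneResponse already reads as
 * not done; hasDone() distinguishes an explicit false from an unset field.
 *
 *   MasterProtos.IsRestoreSnapshotDoneResponse empty =
 *       MasterProtos.IsRestoreSnapshotDoneResponse.getDefaultInstance();
 *   assert !empty.hasDone() && !empty.getDone();
 */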
private IsRestoreSnapshotDoneResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private IsRestoreSnapshotDoneResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final IsRestoreSnapshotDoneResponse defaultInstance; public static IsRestoreSnapshotDoneResponse getDefaultInstance() { return defaultInstance; } public IsRestoreSnapshotDoneResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private IsRestoreSnapshotDoneResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { bitField0_ |= 0x00000001; done_ = input.readBool(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsRestoreSnapshotDoneResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsRestoreSnapshotDoneResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse.Builder.class); } public static com.google.protobuf.Parser<IsRestoreSnapshotDoneResponse> PARSER = new com.google.protobuf.AbstractParser<IsRestoreSnapshotDoneResponse>() { public IsRestoreSnapshotDoneResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new IsRestoreSnapshotDoneResponse(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<IsRestoreSnapshotDoneResponse> getParserForType() { return PARSER; } private int bitField0_; // optional bool done = 1 [default = false]; public static final int DONE_FIELD_NUMBER = 1; private boolean done_; /** * <code>optional bool done = 1 [default = false];</code> */ public boolean hasDone() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional bool done = 1 [default = false];</code> */ public boolean getDone() { return done_; } private void initFields() { done_ = false; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return 
isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, done_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(1, done_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse) obj; boolean result = true; result = result && (hasDone() == other.hasDone()); if (hasDone()) { result = result && (getDone() == other.getDone()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasDone()) { hash = (37 * hash) + DONE_FIELD_NUMBER; hash = (53 * hash) + hashBoolean(getDone()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static 
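/*
 * Usage sketch, not part of the protoc output: the generated equals() and
 * hashCode() above compare field presence, field values, and unknown fields,
 * so two messages built independently from the same inputs compare equal.
 *
 *   MasterProtos.IsRestoreSnapshotDoneResponse a =
 *       MasterProtos.IsRestoreSnapshotDoneResponse.newBuilder().setDone(true).build();
 *   MasterProtos.IsRestoreSnapshotDoneResponse b =
 *       MasterProtos.IsRestoreSnapshotDoneResponse.newBuilder().setDone(true).build();
 *   assert a.equals(b) && a.hashCode() == b.hashCode();
 */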
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code IsRestoreSnapshotDoneResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsRestoreSnapshotDoneResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsRestoreSnapshotDoneResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); done_ = false; bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsRestoreSnapshotDoneResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse getDefaultInstanceForType() { return 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.done_ = done_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse.getDefaultInstance()) return this; if (other.hasDone()) { setDone(other.getDone()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // optional bool done = 1 [default = false]; private boolean done_ ; /** * <code>optional bool done = 1 [default = false];</code> */ public boolean hasDone() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional bool done = 1 [default = false];</code> */ public boolean getDone() { return done_; } /** * <code>optional bool done = 1 [default = false];</code> */ public Builder setDone(boolean value) { bitField0_ |= 0x00000001; done_ = value; onChanged(); return this; } /** * <code>optional bool done = 1 [default = false];</code> */ public Builder clearDone() { bitField0_ = (bitField0_ & ~0x00000001); done_ = false; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:IsRestoreSnapshotDoneResponse) } static { defaultInstance = new IsRestoreSnapshotDoneResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:IsRestoreSnapshotDoneResponse) } public interface GetSchemaAlterStatusRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .TableName table_name = 1; /** * <code>required .TableName table_name = 1;</code> */ boolean hasTableName(); /** * <code>required .TableName table_name = 
1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName(); /** * <code>required .TableName table_name = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); } /** * Protobuf type {@code GetSchemaAlterStatusRequest} */ public static final class GetSchemaAlterStatusRequest extends com.google.protobuf.GeneratedMessage implements GetSchemaAlterStatusRequestOrBuilder { // Use GetSchemaAlterStatusRequest.newBuilder() to construct. private GetSchemaAlterStatusRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private GetSchemaAlterStatusRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final GetSchemaAlterStatusRequest defaultInstance; public static GetSchemaAlterStatusRequest getDefaultInstance() { return defaultInstance; } public GetSchemaAlterStatusRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetSchemaAlterStatusRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = tableName_.toBuilder(); } tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(tableName_); tableName_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetSchemaAlterStatusRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetSchemaAlterStatusRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest.Builder.class); } public static com.google.protobuf.Parser<GetSchemaAlterStatusRequest> PARSER = new com.google.protobuf.AbstractParser<GetSchemaAlterStatusRequest>() { public GetSchemaAlterStatusRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new GetSchemaAlterStatusRequest(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<GetSchemaAlterStatusRequest> getParserForType() { return PARSER; } private int bitField0_; // required .TableName table_name = 1; public static final int TABLE_NAME_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_; /** * <code>required .TableName table_name = 1;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() { return tableName_; } /** * <code>required .TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { return tableName_; } private void initFields() { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasTableName()) { memoizedIsInitialized = 0; return false; } if (!getTableName().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, tableName_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, tableName_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest) obj; boolean result = true; result = result && (hasTableName() == other.hasTableName()); if (hasTableName()) { result = result && getTableName() .equals(other.getTableName()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasTableName()) { hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER; hash = (53 * hash) + getTableName().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parseFrom( com.google.protobuf.ByteString data) 
throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code GetSchemaAlterStatusRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetSchemaAlterStatusRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetSchemaAlterStatusRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getTableNameFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (tableNameBuilder_ == null) { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); } else { tableNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetSchemaAlterStatusRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (tableNameBuilder_ == null) { result.tableName_ = tableName_; } else { result.tableName_ = tableNameBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest.getDefaultInstance()) return this; if (other.hasTableName()) { mergeTableName(other.getTableName()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasTableName()) { return false; } if (!getTableName().isInitialized()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 
throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required .TableName table_name = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * <code>required .TableName table_name = 1;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() { if (tableNameBuilder_ == null) { return tableName_; } else { return tableNameBuilder_.getMessage(); } } /** * <code>required .TableName table_name = 1;</code> */ public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (value == null) { throw new NullPointerException(); } tableName_ = value; onChanged(); } else { tableNameBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .TableName table_name = 1;</code> */ public Builder setTableName( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) { if (tableNameBuilder_ == null) { tableName_ = builderForValue.build(); onChanged(); } else { tableNameBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>required .TableName table_name = 1;</code> */ public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); } else { tableName_ = value; } onChanged(); } else { tableNameBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .TableName table_name = 1;</code> */ public Builder clearTableName() { if (tableNameBuilder_ == null) { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); onChanged(); } else { tableNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>required .TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() { bitField0_ |= 0x00000001; onChanged(); return getTableNameFieldBuilder().getBuilder(); } /** * <code>required .TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { if (tableNameBuilder_ != null) { return 
tableNameBuilder_.getMessageOrBuilder(); } else { return tableName_; } } /** * <code>required .TableName table_name = 1;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>( tableName_, getParentForChildren(), isClean()); tableName_ = null; } return tableNameBuilder_; } // @@protoc_insertion_point(builder_scope:GetSchemaAlterStatusRequest) } static { defaultInstance = new GetSchemaAlterStatusRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:GetSchemaAlterStatusRequest) } public interface GetSchemaAlterStatusResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { // optional uint32 yet_to_update_regions = 1; /** * <code>optional uint32 yet_to_update_regions = 1;</code> */ boolean hasYetToUpdateRegions(); /** * <code>optional uint32 yet_to_update_regions = 1;</code> */ int getYetToUpdateRegions(); // optional uint32 total_regions = 2; /** * <code>optional uint32 total_regions = 2;</code> */ boolean hasTotalRegions(); /** * <code>optional uint32 total_regions = 2;</code> */ int getTotalRegions(); } /** * Protobuf type {@code GetSchemaAlterStatusResponse} */ public static final class GetSchemaAlterStatusResponse extends com.google.protobuf.GeneratedMessage implements GetSchemaAlterStatusResponseOrBuilder { // Use GetSchemaAlterStatusResponse.newBuilder() to construct. 
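  // Editorial note (illustrative sketch, not protoc output): one way a caller
  // might pair GetSchemaAlterStatusRequest with this response type. The
  // namespace/qualifier values, the RPC transport, and the "responseBytes"
  // variable are placeholder assumptions; only the newBuilder()/setTableName()
  // and parseFrom(byte[]) calls shown are defined in this file and HBaseProtos.
  //
  //   GetSchemaAlterStatusRequest req =
  //       GetSchemaAlterStatusRequest.newBuilder()
  //           .setTableName(
  //               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName
  //                   .newBuilder()
  //                   .setNamespace(com.google.protobuf.ByteString.copyFromUtf8("default"))
  //                   .setQualifier(com.google.protobuf.ByteString.copyFromUtf8("my_table"))
  //                   .build())
  //           .build();
  //   // ... send req over the master RPC channel, then decode the reply:
  //   GetSchemaAlterStatusResponse resp =
  //       GetSchemaAlterStatusResponse.parseFrom(responseBytes);
  //   boolean alterComplete =
  //       resp.hasTotalRegions() && resp.getYetToUpdateRegions() == 0;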
private GetSchemaAlterStatusResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private GetSchemaAlterStatusResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final GetSchemaAlterStatusResponse defaultInstance; public static GetSchemaAlterStatusResponse getDefaultInstance() { return defaultInstance; } public GetSchemaAlterStatusResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetSchemaAlterStatusResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { bitField0_ |= 0x00000001; yetToUpdateRegions_ = input.readUInt32(); break; } case 16: { bitField0_ |= 0x00000002; totalRegions_ = input.readUInt32(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetSchemaAlterStatusResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetSchemaAlterStatusResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse.Builder.class); } public static com.google.protobuf.Parser<GetSchemaAlterStatusResponse> PARSER = new com.google.protobuf.AbstractParser<GetSchemaAlterStatusResponse>() { public GetSchemaAlterStatusResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new GetSchemaAlterStatusResponse(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<GetSchemaAlterStatusResponse> getParserForType() { return PARSER; } private int bitField0_; // optional uint32 yet_to_update_regions = 1; public static final int YET_TO_UPDATE_REGIONS_FIELD_NUMBER = 1; private int yetToUpdateRegions_; /** * <code>optional uint32 yet_to_update_regions = 1;</code> */ public boolean hasYetToUpdateRegions() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional uint32 yet_to_update_regions = 1;</code> */ public int getYetToUpdateRegions() { return yetToUpdateRegions_; } // optional uint32 total_regions = 2; public static final int 
TOTAL_REGIONS_FIELD_NUMBER = 2; private int totalRegions_; /** * <code>optional uint32 total_regions = 2;</code> */ public boolean hasTotalRegions() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional uint32 total_regions = 2;</code> */ public int getTotalRegions() { return totalRegions_; } private void initFields() { yetToUpdateRegions_ = 0; totalRegions_ = 0; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt32(1, yetToUpdateRegions_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeUInt32(2, totalRegions_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeUInt32Size(1, yetToUpdateRegions_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeUInt32Size(2, totalRegions_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse) obj; boolean result = true; result = result && (hasYetToUpdateRegions() == other.hasYetToUpdateRegions()); if (hasYetToUpdateRegions()) { result = result && (getYetToUpdateRegions() == other.getYetToUpdateRegions()); } result = result && (hasTotalRegions() == other.hasTotalRegions()); if (hasTotalRegions()) { result = result && (getTotalRegions() == other.getTotalRegions()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasYetToUpdateRegions()) { hash = (37 * hash) + YET_TO_UPDATE_REGIONS_FIELD_NUMBER; hash = (53 * hash) + getYetToUpdateRegions(); } if (hasTotalRegions()) { hash = (37 * hash) + TOTAL_REGIONS_FIELD_NUMBER; hash = (53 * hash) + getTotalRegions(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code GetSchemaAlterStatusResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetSchemaAlterStatusResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetSchemaAlterStatusResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse.class, 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); yetToUpdateRegions_ = 0; bitField0_ = (bitField0_ & ~0x00000001); totalRegions_ = 0; bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetSchemaAlterStatusResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.yetToUpdateRegions_ = yetToUpdateRegions_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.totalRegions_ = totalRegions_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse.getDefaultInstance()) return this; if (other.hasYetToUpdateRegions()) { setYetToUpdateRegions(other.getYetToUpdateRegions()); } if (other.hasTotalRegions()) { setTotalRegions(other.getTotalRegions()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = 
(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // optional uint32 yet_to_update_regions = 1; private int yetToUpdateRegions_ ; /** * <code>optional uint32 yet_to_update_regions = 1;</code> */ public boolean hasYetToUpdateRegions() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional uint32 yet_to_update_regions = 1;</code> */ public int getYetToUpdateRegions() { return yetToUpdateRegions_; } /** * <code>optional uint32 yet_to_update_regions = 1;</code> */ public Builder setYetToUpdateRegions(int value) { bitField0_ |= 0x00000001; yetToUpdateRegions_ = value; onChanged(); return this; } /** * <code>optional uint32 yet_to_update_regions = 1;</code> */ public Builder clearYetToUpdateRegions() { bitField0_ = (bitField0_ & ~0x00000001); yetToUpdateRegions_ = 0; onChanged(); return this; } // optional uint32 total_regions = 2; private int totalRegions_ ; /** * <code>optional uint32 total_regions = 2;</code> */ public boolean hasTotalRegions() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional uint32 total_regions = 2;</code> */ public int getTotalRegions() { return totalRegions_; } /** * <code>optional uint32 total_regions = 2;</code> */ public Builder setTotalRegions(int value) { bitField0_ |= 0x00000002; totalRegions_ = value; onChanged(); return this; } /** * <code>optional uint32 total_regions = 2;</code> */ public Builder clearTotalRegions() { bitField0_ = (bitField0_ & ~0x00000002); totalRegions_ = 0; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:GetSchemaAlterStatusResponse) } static { defaultInstance = new GetSchemaAlterStatusResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:GetSchemaAlterStatusResponse) } public interface GetTableDescriptorsRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // repeated .TableName table_names = 1; /** * <code>repeated .TableName table_names = 1;</code> */ java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> getTableNamesList(); /** * <code>repeated .TableName table_names = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableNames(int index); /** * <code>repeated .TableName table_names = 1;</code> */ int getTableNamesCount(); /** * <code>repeated .TableName table_names = 1;</code> */ java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNamesOrBuilderList(); /** * <code>repeated .TableName table_names = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNamesOrBuilder( int index); // optional string regex = 2; /** * <code>optional string regex = 2;</code> */ boolean hasRegex(); /** * <code>optional string regex = 2;</code> */ java.lang.String getRegex(); /** * <code>optional string regex = 2;</code> */ com.google.protobuf.ByteString getRegexBytes(); // optional bool include_sys_tables = 3 [default = false]; /** * <code>optional bool include_sys_tables = 3 [default = false];</code> */ boolean hasIncludeSysTables(); /** * <code>optional bool include_sys_tables = 3 [default = false];</code> */ boolean getIncludeSysTables(); // optional string namespace = 4; /** * <code>optional string namespace = 4;</code> */ boolean hasNamespace(); /** * <code>optional string namespace = 4;</code> */ java.lang.String getNamespace(); /** * <code>optional string namespace = 4;</code> */ com.google.protobuf.ByteString getNamespaceBytes(); } /** * Protobuf type {@code GetTableDescriptorsRequest} */ public static final class GetTableDescriptorsRequest extends com.google.protobuf.GeneratedMessage implements GetTableDescriptorsRequestOrBuilder { // Use GetTableDescriptorsRequest.newBuilder() to construct. private GetTableDescriptorsRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private GetTableDescriptorsRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final GetTableDescriptorsRequest defaultInstance; public static GetTableDescriptorsRequest getDefaultInstance() { return defaultInstance; } public GetTableDescriptorsRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetTableDescriptorsRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { tableNames_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName>(); mutable_bitField0_ |= 0x00000001; } tableNames_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry)); break; } case 18: { bitField0_ |= 0x00000001; regex_ = input.readBytes(); break; } case 24: { bitField0_ |= 0x00000002; includeSysTables_ = input.readBool(); break; } case 34: { bitField0_ |= 0x00000004; namespace_ = input.readBytes(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { if 
(((mutable_bitField0_ & 0x00000001) == 0x00000001)) { tableNames_ = java.util.Collections.unmodifiableList(tableNames_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableDescriptorsRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableDescriptorsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest.Builder.class); } public static com.google.protobuf.Parser<GetTableDescriptorsRequest> PARSER = new com.google.protobuf.AbstractParser<GetTableDescriptorsRequest>() { public GetTableDescriptorsRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new GetTableDescriptorsRequest(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<GetTableDescriptorsRequest> getParserForType() { return PARSER; } private int bitField0_; // repeated .TableName table_names = 1; public static final int TABLE_NAMES_FIELD_NUMBER = 1; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> tableNames_; /** * <code>repeated .TableName table_names = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> getTableNamesList() { return tableNames_; } /** * <code>repeated .TableName table_names = 1;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNamesOrBuilderList() { return tableNames_; } /** * <code>repeated .TableName table_names = 1;</code> */ public int getTableNamesCount() { return tableNames_.size(); } /** * <code>repeated .TableName table_names = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableNames(int index) { return tableNames_.get(index); } /** * <code>repeated .TableName table_names = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNamesOrBuilder( int index) { return tableNames_.get(index); } // optional string regex = 2; public static final int REGEX_FIELD_NUMBER = 2; private java.lang.Object regex_; /** * <code>optional string regex = 2;</code> */ public boolean hasRegex() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional string regex = 2;</code> */ public java.lang.String getRegex() { java.lang.Object ref = regex_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { regex_ = s; } return s; } } /** * <code>optional string regex = 2;</code> */ public com.google.protobuf.ByteString getRegexBytes() { java.lang.Object ref = regex_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); regex_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } // optional bool include_sys_tables = 3 [default = false]; public static final int INCLUDE_SYS_TABLES_FIELD_NUMBER = 3; private boolean includeSysTables_; /** * <code>optional bool include_sys_tables = 3 [default = false];</code> */ public boolean hasIncludeSysTables() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional bool include_sys_tables = 3 [default = false];</code> */ public boolean getIncludeSysTables() { return includeSysTables_; } // optional string namespace = 4; public static final int NAMESPACE_FIELD_NUMBER = 4; private java.lang.Object namespace_; /** * <code>optional string namespace = 4;</code> */ public boolean hasNamespace() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional string namespace = 4;</code> */ public java.lang.String getNamespace() { java.lang.Object ref = namespace_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { namespace_ = s; } return s; } } /** * <code>optional string namespace = 4;</code> */ public com.google.protobuf.ByteString getNamespaceBytes() { java.lang.Object ref = namespace_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); namespace_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private void initFields() { tableNames_ = java.util.Collections.emptyList(); regex_ = ""; includeSysTables_ = false; namespace_ = ""; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; for (int i = 0; i < getTableNamesCount(); i++) { if (!getTableNames(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } 
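  // Editorial note (not protoc output): the loop above is the only
  // initialization check this request needs. Each nested TableName message is
  // verified because it declares required fields of its own, while regex,
  // include_sys_tables, and namespace are optional scalars that can never
  // leave the request uninitialized, so the result can be memoized as
  // success below.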
memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); for (int i = 0; i < tableNames_.size(); i++) { output.writeMessage(1, tableNames_.get(i)); } if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(2, getRegexBytes()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBool(3, includeSysTables_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBytes(4, getNamespaceBytes()); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; for (int i = 0; i < tableNames_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, tableNames_.get(i)); } if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, getRegexBytes()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(3, includeSysTables_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(4, getNamespaceBytes()); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest) obj; boolean result = true; result = result && getTableNamesList() .equals(other.getTableNamesList()); result = result && (hasRegex() == other.hasRegex()); if (hasRegex()) { result = result && getRegex() .equals(other.getRegex()); } result = result && (hasIncludeSysTables() == other.hasIncludeSysTables()); if (hasIncludeSysTables()) { result = result && (getIncludeSysTables() == other.getIncludeSysTables()); } result = result && (hasNamespace() == other.hasNamespace()); if (hasNamespace()) { result = result && getNamespace() .equals(other.getNamespace()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (getTableNamesCount() > 0) { hash = (37 * hash) + TABLE_NAMES_FIELD_NUMBER; hash = (53 * hash) + getTableNamesList().hashCode(); } if (hasRegex()) { hash = (37 * hash) + REGEX_FIELD_NUMBER; hash = (53 * hash) + getRegex().hashCode(); } if (hasIncludeSysTables()) { hash = (37 * hash) + INCLUDE_SYS_TABLES_FIELD_NUMBER; hash = (53 * hash) + hashBoolean(getIncludeSysTables()); } if (hasNamespace()) { hash = (37 * hash) + NAMESPACE_FIELD_NUMBER; hash = (53 * hash) + getNamespace().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parseFrom( com.google.protobuf.ByteString data) 
throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code GetTableDescriptorsRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableDescriptorsRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableDescriptorsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getTableNamesFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (tableNamesBuilder_ == null) { tableNames_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { tableNamesBuilder_.clear(); } regex_ = ""; bitField0_ = (bitField0_ & ~0x00000002); includeSysTables_ = false; bitField0_ = (bitField0_ & ~0x00000004); namespace_ = ""; bitField0_ = (bitField0_ & ~0x00000008); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableDescriptorsRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (tableNamesBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001)) { tableNames_ = java.util.Collections.unmodifiableList(tableNames_); bitField0_ = (bitField0_ & ~0x00000001); } result.tableNames_ = tableNames_; } else { result.tableNames_ = tableNamesBuilder_.build(); } if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000001; } result.regex_ = regex_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000002; } result.includeSysTables_ = includeSysTables_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000004; } result.namespace_ = namespace_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest other) 
{ if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest.getDefaultInstance()) return this; if (tableNamesBuilder_ == null) { if (!other.tableNames_.isEmpty()) { if (tableNames_.isEmpty()) { tableNames_ = other.tableNames_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureTableNamesIsMutable(); tableNames_.addAll(other.tableNames_); } onChanged(); } } else { if (!other.tableNames_.isEmpty()) { if (tableNamesBuilder_.isEmpty()) { tableNamesBuilder_.dispose(); tableNamesBuilder_ = null; tableNames_ = other.tableNames_; bitField0_ = (bitField0_ & ~0x00000001); tableNamesBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? getTableNamesFieldBuilder() : null; } else { tableNamesBuilder_.addAllMessages(other.tableNames_); } } } if (other.hasRegex()) { bitField0_ |= 0x00000002; regex_ = other.regex_; onChanged(); } if (other.hasIncludeSysTables()) { setIncludeSysTables(other.getIncludeSysTables()); } if (other.hasNamespace()) { bitField0_ |= 0x00000008; namespace_ = other.namespace_; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { for (int i = 0; i < getTableNamesCount(); i++) { if (!getTableNames(i).isInitialized()) { return false; } } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // repeated .TableName table_names = 1; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> tableNames_ = java.util.Collections.emptyList(); private void ensureTableNamesIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { tableNames_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName>(tableNames_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNamesBuilder_; /** * <code>repeated .TableName table_names = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> getTableNamesList() { if (tableNamesBuilder_ == null) { return java.util.Collections.unmodifiableList(tableNames_); } else { return tableNamesBuilder_.getMessageList(); } } /** * <code>repeated .TableName table_names = 1;</code> */ public int getTableNamesCount() { if (tableNamesBuilder_ == null) { return tableNames_.size(); } else { return tableNamesBuilder_.getCount(); } } /** * <code>repeated .TableName table_names = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableNames(int index) { if (tableNamesBuilder_ == null) { return tableNames_.get(index); } else { return tableNamesBuilder_.getMessage(index); } } /** * <code>repeated .TableName table_names = 1;</code> */ public Builder setTableNames( int 
index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { if (tableNamesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTableNamesIsMutable(); tableNames_.set(index, value); onChanged(); } else { tableNamesBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .TableName table_names = 1;</code> */ public Builder setTableNames( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) { if (tableNamesBuilder_ == null) { ensureTableNamesIsMutable(); tableNames_.set(index, builderForValue.build()); onChanged(); } else { tableNamesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .TableName table_names = 1;</code> */ public Builder addTableNames(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { if (tableNamesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTableNamesIsMutable(); tableNames_.add(value); onChanged(); } else { tableNamesBuilder_.addMessage(value); } return this; } /** * <code>repeated .TableName table_names = 1;</code> */ public Builder addTableNames( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { if (tableNamesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTableNamesIsMutable(); tableNames_.add(index, value); onChanged(); } else { tableNamesBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .TableName table_names = 1;</code> */ public Builder addTableNames( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) { if (tableNamesBuilder_ == null) { ensureTableNamesIsMutable(); tableNames_.add(builderForValue.build()); onChanged(); } else { tableNamesBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .TableName table_names = 1;</code> */ public Builder addTableNames( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) { if (tableNamesBuilder_ == null) { ensureTableNamesIsMutable(); tableNames_.add(index, builderForValue.build()); onChanged(); } else { tableNamesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .TableName table_names = 1;</code> */ public Builder addAllTableNames( java.lang.Iterable<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> values) { if (tableNamesBuilder_ == null) { ensureTableNamesIsMutable(); super.addAll(values, tableNames_); onChanged(); } else { tableNamesBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .TableName table_names = 1;</code> */ public Builder clearTableNames() { if (tableNamesBuilder_ == null) { tableNames_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { tableNamesBuilder_.clear(); } return this; } /** * <code>repeated .TableName table_names = 1;</code> */ public Builder removeTableNames(int index) { if (tableNamesBuilder_ == null) { ensureTableNamesIsMutable(); tableNames_.remove(index); onChanged(); } else { tableNamesBuilder_.remove(index); } return this; } /** * <code>repeated .TableName table_names = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNamesBuilder( int index) { return getTableNamesFieldBuilder().getBuilder(index); } /** * <code>repeated .TableName table_names = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNamesOrBuilder( int index) { if (tableNamesBuilder_ == null) { return tableNames_.get(index); } else { return tableNamesBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .TableName table_names = 1;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNamesOrBuilderList() { if (tableNamesBuilder_ != null) { return tableNamesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(tableNames_); } } /** * <code>repeated .TableName table_names = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder addTableNamesBuilder() { return getTableNamesFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()); } /** * <code>repeated .TableName table_names = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder addTableNamesBuilder( int index) { return getTableNamesFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()); } /** * <code>repeated .TableName table_names = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder> getTableNamesBuilderList() { return getTableNamesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNamesFieldBuilder() { if (tableNamesBuilder_ == null) { tableNamesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>( tableNames_, ((bitField0_ & 0x00000001) == 0x00000001), getParentForChildren(), isClean()); tableNames_ = null; } return tableNamesBuilder_; } // optional string regex = 2; private java.lang.Object regex_ = ""; /** * <code>optional string regex = 2;</code> */ public boolean hasRegex() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional string 
regex = 2;</code> */ public java.lang.String getRegex() { java.lang.Object ref = regex_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); regex_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>optional string regex = 2;</code> */ public com.google.protobuf.ByteString getRegexBytes() { java.lang.Object ref = regex_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); regex_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>optional string regex = 2;</code> */ public Builder setRegex( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; regex_ = value; onChanged(); return this; } /** * <code>optional string regex = 2;</code> */ public Builder clearRegex() { bitField0_ = (bitField0_ & ~0x00000002); regex_ = getDefaultInstance().getRegex(); onChanged(); return this; } /** * <code>optional string regex = 2;</code> */ public Builder setRegexBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; regex_ = value; onChanged(); return this; } // optional bool include_sys_tables = 3 [default = false]; private boolean includeSysTables_ ; /** * <code>optional bool include_sys_tables = 3 [default = false];</code> */ public boolean hasIncludeSysTables() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional bool include_sys_tables = 3 [default = false];</code> */ public boolean getIncludeSysTables() { return includeSysTables_; } /** * <code>optional bool include_sys_tables = 3 [default = false];</code> */ public Builder setIncludeSysTables(boolean value) { bitField0_ |= 0x00000004; includeSysTables_ = value; onChanged(); return this; } /** * <code>optional bool include_sys_tables = 3 [default = false];</code> */ public Builder clearIncludeSysTables() { bitField0_ = (bitField0_ & ~0x00000004); includeSysTables_ = false; onChanged(); return this; } // optional string namespace = 4; private java.lang.Object namespace_ = ""; /** * <code>optional string namespace = 4;</code> */ public boolean hasNamespace() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional string namespace = 4;</code> */ public java.lang.String getNamespace() { java.lang.Object ref = namespace_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); namespace_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>optional string namespace = 4;</code> */ public com.google.protobuf.ByteString getNamespaceBytes() { java.lang.Object ref = namespace_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); namespace_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>optional string namespace = 4;</code> */ public Builder setNamespace( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000008; namespace_ = value; onChanged(); return this; } /** * <code>optional string namespace = 4;</code> */ public Builder clearNamespace() { bitField0_ = (bitField0_ & ~0x00000008); namespace_ = getDefaultInstance().getNamespace(); onChanged(); return this; } /** * <code>optional string namespace = 4;</code> */ public Builder setNamespaceBytes( 
com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000008; namespace_ = value; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:GetTableDescriptorsRequest) } static { defaultInstance = new GetTableDescriptorsRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:GetTableDescriptorsRequest) } public interface GetTableDescriptorsResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { // repeated .TableSchema table_schema = 1; /** * <code>repeated .TableSchema table_schema = 1;</code> */ java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema> getTableSchemaList(); /** * <code>repeated .TableSchema table_schema = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema(int index); /** * <code>repeated .TableSchema table_schema = 1;</code> */ int getTableSchemaCount(); /** * <code>repeated .TableSchema table_schema = 1;</code> */ java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> getTableSchemaOrBuilderList(); /** * <code>repeated .TableSchema table_schema = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder( int index); } /** * Protobuf type {@code GetTableDescriptorsResponse} */ public static final class GetTableDescriptorsResponse extends com.google.protobuf.GeneratedMessage implements GetTableDescriptorsResponseOrBuilder { // Use GetTableDescriptorsResponse.newBuilder() to construct. private GetTableDescriptorsResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private GetTableDescriptorsResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final GetTableDescriptorsResponse defaultInstance; public static GetTableDescriptorsResponse getDefaultInstance() { return defaultInstance; } public GetTableDescriptorsResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetTableDescriptorsResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { tableSchema_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema>(); mutable_bitField0_ |= 0x00000001; } tableSchema_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.PARSER, extensionRegistry)); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 
0x00000001) == 0x00000001)) { tableSchema_ = java.util.Collections.unmodifiableList(tableSchema_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableDescriptorsResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableDescriptorsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse.Builder.class); } public static com.google.protobuf.Parser<GetTableDescriptorsResponse> PARSER = new com.google.protobuf.AbstractParser<GetTableDescriptorsResponse>() { public GetTableDescriptorsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new GetTableDescriptorsResponse(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<GetTableDescriptorsResponse> getParserForType() { return PARSER; } // repeated .TableSchema table_schema = 1; public static final int TABLE_SCHEMA_FIELD_NUMBER = 1; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema> tableSchema_; /** * <code>repeated .TableSchema table_schema = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema> getTableSchemaList() { return tableSchema_; } /** * <code>repeated .TableSchema table_schema = 1;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> getTableSchemaOrBuilderList() { return tableSchema_; } /** * <code>repeated .TableSchema table_schema = 1;</code> */ public int getTableSchemaCount() { return tableSchema_.size(); } /** * <code>repeated .TableSchema table_schema = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema(int index) { return tableSchema_.get(index); } /** * <code>repeated .TableSchema table_schema = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder( int index) { return tableSchema_.get(index); } private void initFields() { tableSchema_ = java.util.Collections.emptyList(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; for (int i = 0; i < getTableSchemaCount(); i++) { if (!getTableSchema(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); for (int i = 0; i < tableSchema_.size(); i++) { output.writeMessage(1, tableSchema_.get(i)); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; for (int i = 0; i < tableSchema_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, tableSchema_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse) obj; boolean result = true; result = result && getTableSchemaList() .equals(other.getTableSchemaList()); result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (getTableSchemaCount() > 0) { hash = (37 * hash) + TABLE_SCHEMA_FIELD_NUMBER; hash = (53 * hash) + getTableSchemaList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code GetTableDescriptorsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableDescriptorsResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableDescriptorsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse.Builder.class); } // Construct using 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getTableSchemaFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (tableSchemaBuilder_ == null) { tableSchema_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { tableSchemaBuilder_.clear(); } return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableDescriptorsResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse(this); int from_bitField0_ = bitField0_; if (tableSchemaBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001)) { tableSchema_ = java.util.Collections.unmodifiableList(tableSchema_); bitField0_ = (bitField0_ & ~0x00000001); } result.tableSchema_ = tableSchema_; } else { result.tableSchema_ = tableSchemaBuilder_.build(); } onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse.getDefaultInstance()) return this; if (tableSchemaBuilder_ == null) { if (!other.tableSchema_.isEmpty()) { if (tableSchema_.isEmpty()) { tableSchema_ = other.tableSchema_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureTableSchemaIsMutable(); tableSchema_.addAll(other.tableSchema_); } onChanged(); } } else { if (!other.tableSchema_.isEmpty()) { if (tableSchemaBuilder_.isEmpty()) { tableSchemaBuilder_.dispose(); tableSchemaBuilder_ = null; tableSchema_ = other.tableSchema_; bitField0_ = (bitField0_ & ~0x00000001); tableSchemaBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
getTableSchemaFieldBuilder() : null; } else { tableSchemaBuilder_.addAllMessages(other.tableSchema_); } } } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { for (int i = 0; i < getTableSchemaCount(); i++) { if (!getTableSchema(i).isInitialized()) { return false; } } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // repeated .TableSchema table_schema = 1; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema> tableSchema_ = java.util.Collections.emptyList(); private void ensureTableSchemaIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { tableSchema_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema>(tableSchema_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> tableSchemaBuilder_; /** * <code>repeated .TableSchema table_schema = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema> getTableSchemaList() { if (tableSchemaBuilder_ == null) { return java.util.Collections.unmodifiableList(tableSchema_); } else { return tableSchemaBuilder_.getMessageList(); } } /** * <code>repeated .TableSchema table_schema = 1;</code> */ public int getTableSchemaCount() { if (tableSchemaBuilder_ == null) { return tableSchema_.size(); } else { return tableSchemaBuilder_.getCount(); } } /** * <code>repeated .TableSchema table_schema = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema(int index) { if (tableSchemaBuilder_ == null) { return tableSchema_.get(index); } else { return tableSchemaBuilder_.getMessage(index); } } /** * <code>repeated .TableSchema table_schema = 1;</code> */ public Builder setTableSchema( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) { if (tableSchemaBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTableSchemaIsMutable(); tableSchema_.set(index, value); onChanged(); } else { tableSchemaBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .TableSchema table_schema = 1;</code> */ public Builder setTableSchema( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) { if (tableSchemaBuilder_ == null) { ensureTableSchemaIsMutable(); tableSchema_.set(index, builderForValue.build()); onChanged(); } else { tableSchemaBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .TableSchema table_schema = 1;</code> */ public Builder addTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) { if 
(tableSchemaBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTableSchemaIsMutable(); tableSchema_.add(value); onChanged(); } else { tableSchemaBuilder_.addMessage(value); } return this; } /** * <code>repeated .TableSchema table_schema = 1;</code> */ public Builder addTableSchema( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) { if (tableSchemaBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTableSchemaIsMutable(); tableSchema_.add(index, value); onChanged(); } else { tableSchemaBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .TableSchema table_schema = 1;</code> */ public Builder addTableSchema( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) { if (tableSchemaBuilder_ == null) { ensureTableSchemaIsMutable(); tableSchema_.add(builderForValue.build()); onChanged(); } else { tableSchemaBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .TableSchema table_schema = 1;</code> */ public Builder addTableSchema( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) { if (tableSchemaBuilder_ == null) { ensureTableSchemaIsMutable(); tableSchema_.add(index, builderForValue.build()); onChanged(); } else { tableSchemaBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .TableSchema table_schema = 1;</code> */ public Builder addAllTableSchema( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema> values) { if (tableSchemaBuilder_ == null) { ensureTableSchemaIsMutable(); super.addAll(values, tableSchema_); onChanged(); } else { tableSchemaBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .TableSchema table_schema = 1;</code> */ public Builder clearTableSchema() { if (tableSchemaBuilder_ == null) { tableSchema_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { tableSchemaBuilder_.clear(); } return this; } /** * <code>repeated .TableSchema table_schema = 1;</code> */ public Builder removeTableSchema(int index) { if (tableSchemaBuilder_ == null) { ensureTableSchemaIsMutable(); tableSchema_.remove(index); onChanged(); } else { tableSchemaBuilder_.remove(index); } return this; } /** * <code>repeated .TableSchema table_schema = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder getTableSchemaBuilder( int index) { return getTableSchemaFieldBuilder().getBuilder(index); } /** * <code>repeated .TableSchema table_schema = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder( int index) { if (tableSchemaBuilder_ == null) { return tableSchema_.get(index); } else { return tableSchemaBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .TableSchema table_schema = 1;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> getTableSchemaOrBuilderList() { if (tableSchemaBuilder_ != null) { return tableSchemaBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(tableSchema_); } } /** * <code>repeated .TableSchema table_schema = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder addTableSchemaBuilder() { return getTableSchemaFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()); } /** * <code>repeated .TableSchema table_schema = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder addTableSchemaBuilder( int index) { return getTableSchemaFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()); } /** * <code>repeated .TableSchema table_schema = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder> getTableSchemaBuilderList() { return getTableSchemaFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> getTableSchemaFieldBuilder() { if (tableSchemaBuilder_ == null) { tableSchemaBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>( tableSchema_, ((bitField0_ & 0x00000001) == 0x00000001), getParentForChildren(), isClean()); tableSchema_ = null; } return tableSchemaBuilder_; } // @@protoc_insertion_point(builder_scope:GetTableDescriptorsResponse) } static { defaultInstance = new GetTableDescriptorsResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:GetTableDescriptorsResponse) } public interface GetTableNamesRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // optional string regex = 1; /** * <code>optional string regex = 1;</code> */ boolean hasRegex(); /** * <code>optional string regex = 1;</code> */ java.lang.String getRegex(); /** * <code>optional string regex = 1;</code> */ com.google.protobuf.ByteString getRegexBytes(); // optional bool include_sys_tables = 2 [default = false]; /** * <code>optional bool include_sys_tables = 2 [default = false];</code> */ boolean hasIncludeSysTables(); /** * <code>optional bool include_sys_tables = 2 [default = false];</code> */ boolean getIncludeSysTables(); // optional string namespace = 3; /** * <code>optional string namespace = 3;</code> */ boolean hasNamespace(); /** * <code>optional string namespace = 3;</code> */ java.lang.String getNamespace(); /** * <code>optional string namespace = 3;</code> */ com.google.protobuf.ByteString getNamespaceBytes(); } /** * Protobuf type {@code GetTableNamesRequest} */ public static final class GetTableNamesRequest extends com.google.protobuf.GeneratedMessage implements GetTableNamesRequestOrBuilder { // Use GetTableNamesRequest.newBuilder() to construct. 
private GetTableNamesRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private GetTableNamesRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final GetTableNamesRequest defaultInstance; public static GetTableNamesRequest getDefaultInstance() { return defaultInstance; } public GetTableNamesRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetTableNamesRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; regex_ = input.readBytes(); break; } case 16: { bitField0_ |= 0x00000002; includeSysTables_ = input.readBool(); break; } case 26: { bitField0_ |= 0x00000004; namespace_ = input.readBytes(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableNamesRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableNamesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest.Builder.class); } public static com.google.protobuf.Parser<GetTableNamesRequest> PARSER = new com.google.protobuf.AbstractParser<GetTableNamesRequest>() { public GetTableNamesRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new GetTableNamesRequest(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<GetTableNamesRequest> getParserForType() { return PARSER; } private int bitField0_; // optional string regex = 1; public static final int REGEX_FIELD_NUMBER = 1; private java.lang.Object regex_; /** * <code>optional string regex = 1;</code> */ public boolean hasRegex() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional string regex = 1;</code> */ public java.lang.String getRegex() { java.lang.Object ref = regex_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = 
bs.toStringUtf8(); if (bs.isValidUtf8()) { regex_ = s; } return s; } } /** * <code>optional string regex = 1;</code> */ public com.google.protobuf.ByteString getRegexBytes() { java.lang.Object ref = regex_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); regex_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } // optional bool include_sys_tables = 2 [default = false]; public static final int INCLUDE_SYS_TABLES_FIELD_NUMBER = 2; private boolean includeSysTables_; /** * <code>optional bool include_sys_tables = 2 [default = false];</code> */ public boolean hasIncludeSysTables() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional bool include_sys_tables = 2 [default = false];</code> */ public boolean getIncludeSysTables() { return includeSysTables_; } // optional string namespace = 3; public static final int NAMESPACE_FIELD_NUMBER = 3; private java.lang.Object namespace_; /** * <code>optional string namespace = 3;</code> */ public boolean hasNamespace() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional string namespace = 3;</code> */ public java.lang.String getNamespace() { java.lang.Object ref = namespace_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { namespace_ = s; } return s; } } /** * <code>optional string namespace = 3;</code> */ public com.google.protobuf.ByteString getNamespaceBytes() { java.lang.Object ref = namespace_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); namespace_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private void initFields() { regex_ = ""; includeSysTables_ = false; namespace_ = ""; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, getRegexBytes()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBool(2, includeSysTables_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBytes(3, getNamespaceBytes()); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, getRegexBytes()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(2, includeSysTables_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(3, getNamespaceBytes()); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) 
{ if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest) obj; boolean result = true; result = result && (hasRegex() == other.hasRegex()); if (hasRegex()) { result = result && getRegex() .equals(other.getRegex()); } result = result && (hasIncludeSysTables() == other.hasIncludeSysTables()); if (hasIncludeSysTables()) { result = result && (getIncludeSysTables() == other.getIncludeSysTables()); } result = result && (hasNamespace() == other.hasNamespace()); if (hasNamespace()) { result = result && getNamespace() .equals(other.getNamespace()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRegex()) { hash = (37 * hash) + REGEX_FIELD_NUMBER; hash = (53 * hash) + getRegex().hashCode(); } if (hasIncludeSysTables()) { hash = (37 * hash) + INCLUDE_SYS_TABLES_FIELD_NUMBER; hash = (53 * hash) + hashBoolean(getIncludeSysTables()); } if (hasNamespace()) { hash = (37 * hash) + NAMESPACE_FIELD_NUMBER; hash = (53 * hash) + getNamespace().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, 
extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code GetTableNamesRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableNamesRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableNamesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); regex_ = ""; bitField0_ = (bitField0_ & ~0x00000001); includeSysTables_ = false; bitField0_ = (bitField0_ & ~0x00000002); namespace_ = ""; bitField0_ = (bitField0_ & ~0x00000004); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableNamesRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest buildPartial() { 
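// Generated buildPartial(): copies the builder's staging fields into a fresh
// message and converts the builder's bitField0_ presence bits into the
// message's bitField0_. Unlike build(), it skips the isInitialized() check;
// since every field of this message is optional, the two never differ in outcome.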
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.regex_ = regex_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.includeSysTables_ = includeSysTables_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.namespace_ = namespace_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest.getDefaultInstance()) return this; if (other.hasRegex()) { bitField0_ |= 0x00000001; regex_ = other.regex_; onChanged(); } if (other.hasIncludeSysTables()) { setIncludeSysTables(other.getIncludeSysTables()); } if (other.hasNamespace()) { bitField0_ |= 0x00000004; namespace_ = other.namespace_; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // optional string regex = 1; private java.lang.Object regex_ = ""; /** * <code>optional string regex = 1;</code> */ public boolean hasRegex() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional string regex = 1;</code> */ public java.lang.String getRegex() { java.lang.Object ref = regex_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); regex_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>optional string regex = 1;</code> */ public com.google.protobuf.ByteString getRegexBytes() { java.lang.Object ref = regex_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); regex_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>optional string regex = 1;</code> */ public Builder setRegex( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; regex_ = value; onChanged(); return this; } /** * <code>optional string regex = 1;</code> */ public Builder clearRegex() { bitField0_ = (bitField0_ & ~0x00000001); regex_ = getDefaultInstance().getRegex(); onChanged(); return this; } /** * <code>optional string regex = 1;</code> */ 
public Builder setRegexBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; regex_ = value; onChanged(); return this; } // optional bool include_sys_tables = 2 [default = false]; private boolean includeSysTables_ ; /** * <code>optional bool include_sys_tables = 2 [default = false];</code> */ public boolean hasIncludeSysTables() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional bool include_sys_tables = 2 [default = false];</code> */ public boolean getIncludeSysTables() { return includeSysTables_; } /** * <code>optional bool include_sys_tables = 2 [default = false];</code> */ public Builder setIncludeSysTables(boolean value) { bitField0_ |= 0x00000002; includeSysTables_ = value; onChanged(); return this; } /** * <code>optional bool include_sys_tables = 2 [default = false];</code> */ public Builder clearIncludeSysTables() { bitField0_ = (bitField0_ & ~0x00000002); includeSysTables_ = false; onChanged(); return this; } // optional string namespace = 3; private java.lang.Object namespace_ = ""; /** * <code>optional string namespace = 3;</code> */ public boolean hasNamespace() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional string namespace = 3;</code> */ public java.lang.String getNamespace() { java.lang.Object ref = namespace_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); namespace_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>optional string namespace = 3;</code> */ public com.google.protobuf.ByteString getNamespaceBytes() { java.lang.Object ref = namespace_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); namespace_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>optional string namespace = 3;</code> */ public Builder setNamespace( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; namespace_ = value; onChanged(); return this; } /** * <code>optional string namespace = 3;</code> */ public Builder clearNamespace() { bitField0_ = (bitField0_ & ~0x00000004); namespace_ = getDefaultInstance().getNamespace(); onChanged(); return this; } /** * <code>optional string namespace = 3;</code> */ public Builder setNamespaceBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; namespace_ = value; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:GetTableNamesRequest) } static { defaultInstance = new GetTableNamesRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:GetTableNamesRequest) } public interface GetTableNamesResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { // repeated .TableName table_names = 1; /** * <code>repeated .TableName table_names = 1;</code> */ java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> getTableNamesList(); /** * <code>repeated .TableName table_names = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableNames(int index); /** * <code>repeated .TableName table_names = 1;</code> */ int getTableNamesCount(); /** * <code>repeated .TableName table_names = 1;</code> */ java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNamesOrBuilderList(); /** * <code>repeated .TableName table_names = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNamesOrBuilder( int index); } /** * Protobuf type {@code GetTableNamesResponse} */ public static final class GetTableNamesResponse extends com.google.protobuf.GeneratedMessage implements GetTableNamesResponseOrBuilder { // Use GetTableNamesResponse.newBuilder() to construct. private GetTableNamesResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private GetTableNamesResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final GetTableNamesResponse defaultInstance; public static GetTableNamesResponse getDefaultInstance() { return defaultInstance; } public GetTableNamesResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetTableNamesResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { tableNames_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName>(); mutable_bitField0_ |= 0x00000001; } tableNames_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry)); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { tableNames_ = java.util.Collections.unmodifiableList(tableNames_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableNamesResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableNamesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse.Builder.class); } public static com.google.protobuf.Parser<GetTableNamesResponse> PARSER = new com.google.protobuf.AbstractParser<GetTableNamesResponse>() { public GetTableNamesResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return new GetTableNamesResponse(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<GetTableNamesResponse> getParserForType() { return PARSER; } // repeated .TableName table_names = 1; public static final int TABLE_NAMES_FIELD_NUMBER = 1; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> tableNames_; /** * <code>repeated .TableName table_names = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> getTableNamesList() { return tableNames_; } /** * <code>repeated .TableName table_names = 1;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNamesOrBuilderList() { return tableNames_; } /** * <code>repeated .TableName table_names = 1;</code> */ public int getTableNamesCount() { return tableNames_.size(); } /** * <code>repeated .TableName table_names = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableNames(int index) { return tableNames_.get(index); } /** * <code>repeated .TableName table_names = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNamesOrBuilder( int index) { return tableNames_.get(index); } private void initFields() { tableNames_ = java.util.Collections.emptyList(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; for (int i = 0; i < getTableNamesCount(); i++) { if (!getTableNames(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); for (int i = 0; i < tableNames_.size(); i++) { output.writeMessage(1, tableNames_.get(i)); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; for (int i = 0; i < tableNames_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, tableNames_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse) obj; boolean result = true; result = result && getTableNamesList() .equals(other.getTableNamesList()); result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (getTableNamesCount() > 0) { hash = (37 * hash) + TABLE_NAMES_FIELD_NUMBER; hash = (53 * hash) + getTableNamesList().hashCode(); } hash = (29 * 
hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code GetTableNamesResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableNamesResponse_descriptor; 
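// ---------------------------------------------------------------------------
// Hand-written illustration (not protoc output): a minimal sketch of
// assembling a GetTableNamesResponse by hand, e.g. for a test double of the
// master. Assumes TableName's required bytes fields "namespace" and
// "qualifier" as declared in HBaseProtos; build() throws an
// UninitializedMessageException if either is left unset.
//
//   GetTableNamesResponse resp = GetTableNamesResponse.newBuilder()
//       .addTableNames(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos
//           .TableName.newBuilder()
//           .setNamespace(com.google.protobuf.ByteString.copyFromUtf8("default"))
//           .setQualifier(com.google.protobuf.ByteString.copyFromUtf8("t1"))
//           .build())
//       .build();
// ---------------------------------------------------------------------------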
} protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableNamesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getTableNamesFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (tableNamesBuilder_ == null) { tableNames_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { tableNamesBuilder_.clear(); } return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableNamesResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse(this); int from_bitField0_ = bitField0_; if (tableNamesBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001)) { tableNames_ = java.util.Collections.unmodifiableList(tableNames_); bitField0_ = (bitField0_ & ~0x00000001); } result.tableNames_ = tableNames_; } else { result.tableNames_ = tableNamesBuilder_.build(); } onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse.getDefaultInstance()) return this; if (tableNamesBuilder_ == null) { if (!other.tableNames_.isEmpty()) { if (tableNames_.isEmpty()) { tableNames_ = other.tableNames_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureTableNamesIsMutable(); tableNames_.addAll(other.tableNames_); } onChanged(); } } else { if (!other.tableNames_.isEmpty()) { if (tableNamesBuilder_.isEmpty()) { tableNamesBuilder_.dispose(); tableNamesBuilder_ = null; tableNames_ = 
other.tableNames_; bitField0_ = (bitField0_ & ~0x00000001); tableNamesBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? getTableNamesFieldBuilder() : null; } else { tableNamesBuilder_.addAllMessages(other.tableNames_); } } } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { for (int i = 0; i < getTableNamesCount(); i++) { if (!getTableNames(i).isInitialized()) { return false; } } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // repeated .TableName table_names = 1; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> tableNames_ = java.util.Collections.emptyList(); private void ensureTableNamesIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { tableNames_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName>(tableNames_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNamesBuilder_; /** * <code>repeated .TableName table_names = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> getTableNamesList() { if (tableNamesBuilder_ == null) { return java.util.Collections.unmodifiableList(tableNames_); } else { return tableNamesBuilder_.getMessageList(); } } /** * <code>repeated .TableName table_names = 1;</code> */ public int getTableNamesCount() { if (tableNamesBuilder_ == null) { return tableNames_.size(); } else { return tableNamesBuilder_.getCount(); } } /** * <code>repeated .TableName table_names = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableNames(int index) { if (tableNamesBuilder_ == null) { return tableNames_.get(index); } else { return tableNamesBuilder_.getMessage(index); } } /** * <code>repeated .TableName table_names = 1;</code> */ public Builder setTableNames( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { if (tableNamesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTableNamesIsMutable(); tableNames_.set(index, value); onChanged(); } else { tableNamesBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .TableName table_names = 1;</code> */ public Builder setTableNames( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) { if (tableNamesBuilder_ == null) { ensureTableNamesIsMutable(); tableNames_.set(index, builderForValue.build()); onChanged(); } else { tableNamesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .TableName table_names = 1;</code> */ public Builder 
addTableNames(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { if (tableNamesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTableNamesIsMutable(); tableNames_.add(value); onChanged(); } else { tableNamesBuilder_.addMessage(value); } return this; } /** * <code>repeated .TableName table_names = 1;</code> */ public Builder addTableNames( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { if (tableNamesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTableNamesIsMutable(); tableNames_.add(index, value); onChanged(); } else { tableNamesBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .TableName table_names = 1;</code> */ public Builder addTableNames( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) { if (tableNamesBuilder_ == null) { ensureTableNamesIsMutable(); tableNames_.add(builderForValue.build()); onChanged(); } else { tableNamesBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .TableName table_names = 1;</code> */ public Builder addTableNames( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) { if (tableNamesBuilder_ == null) { ensureTableNamesIsMutable(); tableNames_.add(index, builderForValue.build()); onChanged(); } else { tableNamesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .TableName table_names = 1;</code> */ public Builder addAllTableNames( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> values) { if (tableNamesBuilder_ == null) { ensureTableNamesIsMutable(); super.addAll(values, tableNames_); onChanged(); } else { tableNamesBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .TableName table_names = 1;</code> */ public Builder clearTableNames() { if (tableNamesBuilder_ == null) { tableNames_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { tableNamesBuilder_.clear(); } return this; } /** * <code>repeated .TableName table_names = 1;</code> */ public Builder removeTableNames(int index) { if (tableNamesBuilder_ == null) { ensureTableNamesIsMutable(); tableNames_.remove(index); onChanged(); } else { tableNamesBuilder_.remove(index); } return this; } /** * <code>repeated .TableName table_names = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNamesBuilder( int index) { return getTableNamesFieldBuilder().getBuilder(index); } /** * <code>repeated .TableName table_names = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNamesOrBuilder( int index) { if (tableNamesBuilder_ == null) { return tableNames_.get(index); } else { return tableNamesBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .TableName table_names = 1;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNamesOrBuilderList() { if (tableNamesBuilder_ != null) { return tableNamesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(tableNames_); } } /** * <code>repeated .TableName table_names = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder addTableNamesBuilder() { return getTableNamesFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()); } /** * <code>repeated .TableName table_names = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder addTableNamesBuilder( int index) { return getTableNamesFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()); } /** * <code>repeated .TableName table_names = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder> getTableNamesBuilderList() { return getTableNamesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNamesFieldBuilder() { if (tableNamesBuilder_ == null) { tableNamesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>( tableNames_, ((bitField0_ & 0x00000001) == 0x00000001), getParentForChildren(), isClean()); tableNames_ = null; } return tableNamesBuilder_; } // @@protoc_insertion_point(builder_scope:GetTableNamesResponse) } static { defaultInstance = new GetTableNamesResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:GetTableNamesResponse) } public interface GetClusterStatusRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code GetClusterStatusRequest} */ public static final class GetClusterStatusRequest extends com.google.protobuf.GeneratedMessage implements GetClusterStatusRequestOrBuilder { // Use GetClusterStatusRequest.newBuilder() to construct. 
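// ---------------------------------------------------------------------------
// Hand-written illustration (not protoc output): GetClusterStatusRequest
// carries no fields, so the default instance is all a caller needs. A sketch,
// assuming "master" is a connected MasterService blocking stub (the service
// and its BlockingInterface are generated further down in this file):
//
//   GetClusterStatusResponse resp = master.getClusterStatus(
//       null, GetClusterStatusRequest.getDefaultInstance());
//   org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos
//       .ClusterStatus status = resp.getClusterStatus();
// ---------------------------------------------------------------------------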
private GetClusterStatusRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private GetClusterStatusRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final GetClusterStatusRequest defaultInstance; public static GetClusterStatusRequest getDefaultInstance() { return defaultInstance; } public GetClusterStatusRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetClusterStatusRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetClusterStatusRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetClusterStatusRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest.Builder.class); } public static com.google.protobuf.Parser<GetClusterStatusRequest> PARSER = new com.google.protobuf.AbstractParser<GetClusterStatusRequest>() { public GetClusterStatusRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new GetClusterStatusRequest(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<GetClusterStatusRequest> getParserForType() { return PARSER; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws 
java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static 
Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code GetClusterStatusRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetClusterStatusRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetClusterStatusRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetClusterStatusRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public 
final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } // @@protoc_insertion_point(builder_scope:GetClusterStatusRequest) } static { defaultInstance = new GetClusterStatusRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:GetClusterStatusRequest) } public interface GetClusterStatusResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .ClusterStatus cluster_status = 1; /** * <code>required .ClusterStatus cluster_status = 1;</code> */ boolean hasClusterStatus(); /** * <code>required .ClusterStatus cluster_status = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus getClusterStatus(); /** * <code>required .ClusterStatus cluster_status = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatusOrBuilder getClusterStatusOrBuilder(); } /** * Protobuf type {@code GetClusterStatusResponse} */ public static final class GetClusterStatusResponse extends com.google.protobuf.GeneratedMessage implements GetClusterStatusResponseOrBuilder { // Use GetClusterStatusResponse.newBuilder() to construct. private GetClusterStatusResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private GetClusterStatusResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final GetClusterStatusResponse defaultInstance; public static GetClusterStatusResponse getDefaultInstance() { return defaultInstance; } public GetClusterStatusResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetClusterStatusResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = clusterStatus_.toBuilder(); } clusterStatus_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(clusterStatus_); clusterStatus_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } } } } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetClusterStatusResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetClusterStatusResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse.Builder.class); } public static com.google.protobuf.Parser<GetClusterStatusResponse> PARSER = new com.google.protobuf.AbstractParser<GetClusterStatusResponse>() { public GetClusterStatusResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new GetClusterStatusResponse(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<GetClusterStatusResponse> getParserForType() { return PARSER; } private int bitField0_; // required .ClusterStatus cluster_status = 1; public static final int CLUSTER_STATUS_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus clusterStatus_; /** * <code>required .ClusterStatus cluster_status = 1;</code> */ public boolean hasClusterStatus() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .ClusterStatus cluster_status = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus getClusterStatus() { return clusterStatus_; } /** * <code>required .ClusterStatus cluster_status = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatusOrBuilder getClusterStatusOrBuilder() { return clusterStatus_; } private void initFields() { clusterStatus_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasClusterStatus()) { memoizedIsInitialized = 0; return false; } if (!getClusterStatus().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, clusterStatus_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, clusterStatus_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object 
writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse) obj; boolean result = true; result = result && (hasClusterStatus() == other.hasClusterStatus()); if (hasClusterStatus()) { result = result && getClusterStatus() .equals(other.getClusterStatus()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasClusterStatus()) { hash = (37 * hash) + CLUSTER_STATUS_FIELD_NUMBER; hash = (53 * hash) + getClusterStatus().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse 
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code GetClusterStatusResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetClusterStatusResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetClusterStatusResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getClusterStatusFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (clusterStatusBuilder_ == null) { clusterStatus_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDefaultInstance(); } else { clusterStatusBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetClusterStatusResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse(this); int from_bitField0_ = bitField0_; int 
to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (clusterStatusBuilder_ == null) { result.clusterStatus_ = clusterStatus_; } else { result.clusterStatus_ = clusterStatusBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse.getDefaultInstance()) return this; if (other.hasClusterStatus()) { mergeClusterStatus(other.getClusterStatus()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasClusterStatus()) { return false; } if (!getClusterStatus().isInitialized()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required .ClusterStatus cluster_status = 1; private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus clusterStatus_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatusOrBuilder> clusterStatusBuilder_; /** * <code>required .ClusterStatus cluster_status = 1;</code> */ public boolean hasClusterStatus() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .ClusterStatus cluster_status = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus getClusterStatus() { if (clusterStatusBuilder_ == null) { return clusterStatus_; } else { return clusterStatusBuilder_.getMessage(); } } /** * <code>required .ClusterStatus cluster_status = 1;</code> */ public Builder setClusterStatus(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus value) { if (clusterStatusBuilder_ == null) { if (value == null) { throw new NullPointerException(); } clusterStatus_ = value; onChanged(); } else { clusterStatusBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .ClusterStatus cluster_status = 1;</code> */ public Builder setClusterStatus( org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder builderForValue) { if (clusterStatusBuilder_ == null) { clusterStatus_ = builderForValue.build(); onChanged(); } else { 
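// A nested field builder is already attached, so the new value is routed
// through it; the SingleFieldBuilder keeps the parent builder's change
// notifications and cached message consistent.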
clusterStatusBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>required .ClusterStatus cluster_status = 1;</code> */ public Builder mergeClusterStatus(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus value) { if (clusterStatusBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && clusterStatus_ != org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDefaultInstance()) { clusterStatus_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.newBuilder(clusterStatus_).mergeFrom(value).buildPartial(); } else { clusterStatus_ = value; } onChanged(); } else { clusterStatusBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .ClusterStatus cluster_status = 1;</code> */ public Builder clearClusterStatus() { if (clusterStatusBuilder_ == null) { clusterStatus_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDefaultInstance(); onChanged(); } else { clusterStatusBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>required .ClusterStatus cluster_status = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder getClusterStatusBuilder() { bitField0_ |= 0x00000001; onChanged(); return getClusterStatusFieldBuilder().getBuilder(); } /** * <code>required .ClusterStatus cluster_status = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatusOrBuilder getClusterStatusOrBuilder() { if (clusterStatusBuilder_ != null) { return clusterStatusBuilder_.getMessageOrBuilder(); } else { return clusterStatus_; } } /** * <code>required .ClusterStatus cluster_status = 1;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatusOrBuilder> getClusterStatusFieldBuilder() { if (clusterStatusBuilder_ == null) { clusterStatusBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatusOrBuilder>( clusterStatus_, getParentForChildren(), isClean()); clusterStatus_ = null; } return clusterStatusBuilder_; } // @@protoc_insertion_point(builder_scope:GetClusterStatusResponse) } static { defaultInstance = new GetClusterStatusResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:GetClusterStatusResponse) } public interface IsMasterRunningRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code IsMasterRunningRequest} */ public static final class IsMasterRunningRequest extends com.google.protobuf.GeneratedMessage implements IsMasterRunningRequestOrBuilder { // Use IsMasterRunningRequest.newBuilder() to construct. 
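// ---------------------------------------------------------------------------
// Hand-written illustration (not protoc output): like GetClusterStatusRequest,
// this message declares no fields, so the default instance suffices. A
// liveness-check sketch against the same assumed "master" blocking stub,
// reading the required is_master_running bool from the response:
//
//   boolean up = master.isMasterRunning(
//       null, IsMasterRunningRequest.getDefaultInstance()).getIsMasterRunning();
// ---------------------------------------------------------------------------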
private IsMasterRunningRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private IsMasterRunningRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final IsMasterRunningRequest defaultInstance; public static IsMasterRunningRequest getDefaultInstance() { return defaultInstance; } public IsMasterRunningRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private IsMasterRunningRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.Builder.class); } public static com.google.protobuf.Parser<IsMasterRunningRequest> PARSER = new com.google.protobuf.AbstractParser<IsMasterRunningRequest>() { public IsMasterRunningRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new IsMasterRunningRequest(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<IsMasterRunningRequest> getParserForType() { return PARSER; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return 
super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code IsMasterRunningRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { 
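// IsMasterRunningRequest declares no fields, so this builder is always
// initialized and build() cannot fail on missing required fields.
// Illustrative use, not generated code; `master` and `controller` stand in
// for a blocking stub of the master RPC service and its RpcController,
// both assumed to be defined elsewhere:
//
//   IsMasterRunningRequest req = IsMasterRunningRequest.newBuilder().build();
//   IsMasterRunningResponse rsp = master.isMasterRunning(controller, req);
//   boolean running = rsp.getIsMasterRunning();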
return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } // @@protoc_insertion_point(builder_scope:IsMasterRunningRequest) } static { defaultInstance = new IsMasterRunningRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:IsMasterRunningRequest) } public interface IsMasterRunningResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { // required bool is_master_running = 1; /** * <code>required bool is_master_running = 1;</code> */ boolean hasIsMasterRunning(); /** * <code>required bool is_master_running = 1;</code> */ boolean getIsMasterRunning(); } /** * Protobuf type {@code IsMasterRunningResponse} */ public static final class IsMasterRunningResponse extends com.google.protobuf.GeneratedMessage implements IsMasterRunningResponseOrBuilder { // Use IsMasterRunningResponse.newBuilder() to construct. private IsMasterRunningResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private IsMasterRunningResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final IsMasterRunningResponse defaultInstance; public static IsMasterRunningResponse getDefaultInstance() { return defaultInstance; } public IsMasterRunningResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private IsMasterRunningResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { bitField0_ |= 0x00000001; isMasterRunning_ = input.readBool(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningResponse_fieldAccessorTable 
.ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.Builder.class); } public static com.google.protobuf.Parser<IsMasterRunningResponse> PARSER = new com.google.protobuf.AbstractParser<IsMasterRunningResponse>() { public IsMasterRunningResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new IsMasterRunningResponse(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<IsMasterRunningResponse> getParserForType() { return PARSER; } private int bitField0_; // required bool is_master_running = 1; public static final int IS_MASTER_RUNNING_FIELD_NUMBER = 1; private boolean isMasterRunning_; /** * <code>required bool is_master_running = 1;</code> */ public boolean hasIsMasterRunning() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bool is_master_running = 1;</code> */ public boolean getIsMasterRunning() { return isMasterRunning_; } private void initFields() { isMasterRunning_ = false; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasIsMasterRunning()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, isMasterRunning_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(1, isMasterRunning_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse) obj; boolean result = true; result = result && (hasIsMasterRunning() == other.hasIsMasterRunning()); if (hasIsMasterRunning()) { result = result && (getIsMasterRunning() == other.getIsMasterRunning()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasIsMasterRunning()) { hash = (37 * hash) + IS_MASTER_RUNNING_FIELD_NUMBER; hash = (53 * hash) + hashBoolean(getIsMasterRunning()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static 
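// Presence of the required bool above is tracked in bitField0_: bit
// 0x00000001 is set once is_master_running has been assigned, which is what
// hasIsMasterRunning() tests. Illustrative sketch, not generated code:
//
//   IsMasterRunningResponse rsp = IsMasterRunningResponse.newBuilder()
//       .setIsMasterRunning(true)
//       .build();
//   // rsp.hasIsMasterRunning() and rsp.getIsMasterRunning() both hold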
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code IsMasterRunningResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningResponse_descriptor; } protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); isMasterRunning_ = false; bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.isMasterRunning_ = isMasterRunning_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDefaultInstance()) return this; if (other.hasIsMasterRunning()) { setIsMasterRunning(other.getIsMasterRunning()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasIsMasterRunning()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required bool is_master_running = 1; private boolean isMasterRunning_ ; /** * <code>required bool is_master_running = 1;</code> */ public boolean hasIsMasterRunning() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bool is_master_running = 1;</code> */ public boolean getIsMasterRunning() { return isMasterRunning_; } /** * <code>required bool is_master_running = 1;</code> */ public Builder setIsMasterRunning(boolean value) { bitField0_ |= 0x00000001; isMasterRunning_ = value; onChanged(); return this; } /** * <code>required bool is_master_running = 1;</code> */ public Builder clearIsMasterRunning() { bitField0_ = (bitField0_ & ~0x00000001); isMasterRunning_ = false; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:IsMasterRunningResponse) } static { defaultInstance = new IsMasterRunningResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:IsMasterRunningResponse) } public interface ExecProcedureRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .ProcedureDescription procedure = 1; /** * <code>required .ProcedureDescription procedure = 1;</code> */ boolean hasProcedure(); /** * <code>required .ProcedureDescription procedure = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription getProcedure(); /** * <code>required .ProcedureDescription procedure = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder getProcedureOrBuilder(); } /** * Protobuf type {@code ExecProcedureRequest} */ public static final class ExecProcedureRequest extends com.google.protobuf.GeneratedMessage implements ExecProcedureRequestOrBuilder { // Use ExecProcedureRequest.newBuilder() to construct. 
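// Illustrative construction sketch, not generated code. The setSignature()
// and setInstance() calls are assumptions about the builder of
// HBaseProtos.ProcedureDescription, and both string values are placeholders:
//
//   org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription desc =
//       org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription
//           .newBuilder()
//           .setSignature("flush-table-proc") // hypothetical signature
//           .setInstance("myTable")           // hypothetical instance name
//           .build();
//   ExecProcedureRequest req =
//       ExecProcedureRequest.newBuilder().setProcedure(desc).build();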
private ExecProcedureRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private ExecProcedureRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final ExecProcedureRequest defaultInstance; public static ExecProcedureRequest getDefaultInstance() { return defaultInstance; } public ExecProcedureRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ExecProcedureRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = procedure_.toBuilder(); } procedure_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(procedure_); procedure_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ExecProcedureRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ExecProcedureRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest.Builder.class); } public static com.google.protobuf.Parser<ExecProcedureRequest> PARSER = new com.google.protobuf.AbstractParser<ExecProcedureRequest>() { public ExecProcedureRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ExecProcedureRequest(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<ExecProcedureRequest> getParserForType() { return PARSER; } private int bitField0_; // required .ProcedureDescription procedure = 1; public static final int PROCEDURE_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription procedure_; /** * <code>required .ProcedureDescription procedure = 1;</code> */ public boolean hasProcedure() { return 
((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .ProcedureDescription procedure = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription getProcedure() { return procedure_; } /** * <code>required .ProcedureDescription procedure = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder getProcedureOrBuilder() { return procedure_; } private void initFields() { procedure_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasProcedure()) { memoizedIsInitialized = 0; return false; } if (!getProcedure().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, procedure_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, procedure_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest) obj; boolean result = true; result = result && (hasProcedure() == other.hasProcedure()); if (hasProcedure()) { result = result && getProcedure() .equals(other.getProcedure()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasProcedure()) { hash = (37 * hash) + PROCEDURE_FIELD_NUMBER; hash = (53 * hash) + getProcedure().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return 
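// Because `procedure` is required, the parseFrom() overloads in this block
// reject input that omits it (or whose nested ProcedureDescription is
// itself incomplete), surfacing the isInitialized() checks above as an
// InvalidProtocolBufferException. Hedged sketch, not generated code:
//
//   try {
//     ExecProcedureRequest.parseFrom(new byte[0]); // no procedure field
//   } catch (com.google.protobuf.InvalidProtocolBufferException expected) {
//     // message is roughly "Message missing required fields: procedure"
//   }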
PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code ExecProcedureRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ExecProcedureRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ExecProcedureRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if 
(com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getProcedureFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (procedureBuilder_ == null) { procedure_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance(); } else { procedureBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ExecProcedureRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (procedureBuilder_ == null) { result.procedure_ = procedure_; } else { result.procedure_ = procedureBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest.getDefaultInstance()) return this; if (other.hasProcedure()) { mergeProcedure(other.getProcedure()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasProcedure()) { return false; } if (!getProcedure().isInitialized()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required .ProcedureDescription procedure = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription procedure_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance(); private 
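// The nested-message field keeps exactly one live representation: either the
// plain message in procedure_ or, once getProcedureBuilder() is first
// called, the lazily created SingleFieldBuilder declared below (see
// getProcedureFieldBuilder(), which nulls out procedure_ on first use).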
com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder> procedureBuilder_; /** * <code>required .ProcedureDescription procedure = 1;</code> */ public boolean hasProcedure() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .ProcedureDescription procedure = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription getProcedure() { if (procedureBuilder_ == null) { return procedure_; } else { return procedureBuilder_.getMessage(); } } /** * <code>required .ProcedureDescription procedure = 1;</code> */ public Builder setProcedure(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription value) { if (procedureBuilder_ == null) { if (value == null) { throw new NullPointerException(); } procedure_ = value; onChanged(); } else { procedureBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .ProcedureDescription procedure = 1;</code> */ public Builder setProcedure( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder builderForValue) { if (procedureBuilder_ == null) { procedure_ = builderForValue.build(); onChanged(); } else { procedureBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>required .ProcedureDescription procedure = 1;</code> */ public Builder mergeProcedure(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription value) { if (procedureBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && procedure_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance()) { procedure_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.newBuilder(procedure_).mergeFrom(value).buildPartial(); } else { procedure_ = value; } onChanged(); } else { procedureBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .ProcedureDescription procedure = 1;</code> */ public Builder clearProcedure() { if (procedureBuilder_ == null) { procedure_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance(); onChanged(); } else { procedureBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>required .ProcedureDescription procedure = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder getProcedureBuilder() { bitField0_ |= 0x00000001; onChanged(); return getProcedureFieldBuilder().getBuilder(); } /** * <code>required .ProcedureDescription procedure = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder getProcedureOrBuilder() { if (procedureBuilder_ != null) { return procedureBuilder_.getMessageOrBuilder(); } else { return procedure_; } } /** * <code>required .ProcedureDescription procedure = 1;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder> getProcedureFieldBuilder() { if (procedureBuilder_ == null) { procedureBuilder_ = new 
com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder>( procedure_, getParentForChildren(), isClean()); procedure_ = null; } return procedureBuilder_; } // @@protoc_insertion_point(builder_scope:ExecProcedureRequest) } static { defaultInstance = new ExecProcedureRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:ExecProcedureRequest) } public interface ExecProcedureResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { // optional int64 expected_timeout = 1; /** * <code>optional int64 expected_timeout = 1;</code> */ boolean hasExpectedTimeout(); /** * <code>optional int64 expected_timeout = 1;</code> */ long getExpectedTimeout(); // optional bytes return_data = 2; /** * <code>optional bytes return_data = 2;</code> */ boolean hasReturnData(); /** * <code>optional bytes return_data = 2;</code> */ com.google.protobuf.ByteString getReturnData(); } /** * Protobuf type {@code ExecProcedureResponse} */ public static final class ExecProcedureResponse extends com.google.protobuf.GeneratedMessage implements ExecProcedureResponseOrBuilder { // Use ExecProcedureResponse.newBuilder() to construct. private ExecProcedureResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private ExecProcedureResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final ExecProcedureResponse defaultInstance; public static ExecProcedureResponse getDefaultInstance() { return defaultInstance; } public ExecProcedureResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ExecProcedureResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { bitField0_ |= 0x00000001; expectedTimeout_ = input.readInt64(); break; } case 18: { bitField0_ |= 0x00000002; returnData_ = input.readBytes(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ExecProcedureResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return 
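// Both ExecProcedureResponse fields are optional, so isInitialized() below
// always succeeds. Illustrative read sketch, not generated code; `rsp`
// stands in for a response obtained elsewhere:
//
//   long timeoutMs = rsp.hasExpectedTimeout() ? rsp.getExpectedTimeout() : 0L;
//   com.google.protobuf.ByteString data = rsp.hasReturnData()
//       ? rsp.getReturnData() : com.google.protobuf.ByteString.EMPTY;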
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ExecProcedureResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.Builder.class); } public static com.google.protobuf.Parser<ExecProcedureResponse> PARSER = new com.google.protobuf.AbstractParser<ExecProcedureResponse>() { public ExecProcedureResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ExecProcedureResponse(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<ExecProcedureResponse> getParserForType() { return PARSER; } private int bitField0_; // optional int64 expected_timeout = 1; public static final int EXPECTED_TIMEOUT_FIELD_NUMBER = 1; private long expectedTimeout_; /** * <code>optional int64 expected_timeout = 1;</code> */ public boolean hasExpectedTimeout() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional int64 expected_timeout = 1;</code> */ public long getExpectedTimeout() { return expectedTimeout_; } // optional bytes return_data = 2; public static final int RETURN_DATA_FIELD_NUMBER = 2; private com.google.protobuf.ByteString returnData_; /** * <code>optional bytes return_data = 2;</code> */ public boolean hasReturnData() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional bytes return_data = 2;</code> */ public com.google.protobuf.ByteString getReturnData() { return returnData_; } private void initFields() { expectedTimeout_ = 0L; returnData_ = com.google.protobuf.ByteString.EMPTY; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeInt64(1, expectedTimeout_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, returnData_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeInt64Size(1, expectedTimeout_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, returnData_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse) obj; boolean result = true; result = result && (hasExpectedTimeout() == other.hasExpectedTimeout()); if 
(hasExpectedTimeout()) { result = result && (getExpectedTimeout() == other.getExpectedTimeout()); } result = result && (hasReturnData() == other.hasReturnData()); if (hasReturnData()) { result = result && getReturnData() .equals(other.getReturnData()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasExpectedTimeout()) { hash = (37 * hash) + EXPECTED_TIMEOUT_FIELD_NUMBER; hash = (53 * hash) + hashLong(getExpectedTimeout()); } if (hasReturnData()) { hash = (37 * hash) + RETURN_DATA_FIELD_NUMBER; hash = (53 * hash) + getReturnData().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder 
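// newBuilder(prototype) below seeds a fresh builder from an existing
// message; toBuilder() delegates to it. Illustrative sketch, not generated
// code; `rsp` and the 60000L timeout are placeholders:
//
//   ExecProcedureResponse tweaked = rsp.toBuilder()
//       .setExpectedTimeout(60000L)
//       .build();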
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code ExecProcedureResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ExecProcedureResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ExecProcedureResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); expectedTimeout_ = 0L; bitField0_ = (bitField0_ & ~0x00000001); returnData_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ExecProcedureResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.expectedTimeout_ = expectedTimeout_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.returnData_ = returnData_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse) { return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance()) return this; if (other.hasExpectedTimeout()) { setExpectedTimeout(other.getExpectedTimeout()); } if (other.hasReturnData()) { setReturnData(other.getReturnData()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // optional int64 expected_timeout = 1; private long expectedTimeout_ ; /** * <code>optional int64 expected_timeout = 1;</code> */ public boolean hasExpectedTimeout() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional int64 expected_timeout = 1;</code> */ public long getExpectedTimeout() { return expectedTimeout_; } /** * <code>optional int64 expected_timeout = 1;</code> */ public Builder setExpectedTimeout(long value) { bitField0_ |= 0x00000001; expectedTimeout_ = value; onChanged(); return this; } /** * <code>optional int64 expected_timeout = 1;</code> */ public Builder clearExpectedTimeout() { bitField0_ = (bitField0_ & ~0x00000001); expectedTimeout_ = 0L; onChanged(); return this; } // optional bytes return_data = 2; private com.google.protobuf.ByteString returnData_ = com.google.protobuf.ByteString.EMPTY; /** * <code>optional bytes return_data = 2;</code> */ public boolean hasReturnData() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional bytes return_data = 2;</code> */ public com.google.protobuf.ByteString getReturnData() { return returnData_; } /** * <code>optional bytes return_data = 2;</code> */ public Builder setReturnData(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; returnData_ = value; onChanged(); return this; } /** * <code>optional bytes return_data = 2;</code> */ public Builder clearReturnData() { bitField0_ = (bitField0_ & ~0x00000002); returnData_ = getDefaultInstance().getReturnData(); onChanged(); return this; } // @@protoc_insertion_point(builder_scope:ExecProcedureResponse) } static { defaultInstance = new ExecProcedureResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:ExecProcedureResponse) } public interface IsProcedureDoneRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // optional .ProcedureDescription procedure = 1; /** * <code>optional .ProcedureDescription procedure = 1;</code> */ boolean hasProcedure(); /** * <code>optional .ProcedureDescription procedure = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription getProcedure(); /** * <code>optional .ProcedureDescription procedure = 
1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder getProcedureOrBuilder(); } /** * Protobuf type {@code IsProcedureDoneRequest} */ public static final class IsProcedureDoneRequest extends com.google.protobuf.GeneratedMessage implements IsProcedureDoneRequestOrBuilder { // Use IsProcedureDoneRequest.newBuilder() to construct. private IsProcedureDoneRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private IsProcedureDoneRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final IsProcedureDoneRequest defaultInstance; public static IsProcedureDoneRequest getDefaultInstance() { return defaultInstance; } public IsProcedureDoneRequest getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private IsProcedureDoneRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = procedure_.toBuilder(); } procedure_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(procedure_); procedure_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsProcedureDoneRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsProcedureDoneRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest.Builder.class); } public static com.google.protobuf.Parser<IsProcedureDoneRequest> PARSER = new com.google.protobuf.AbstractParser<IsProcedureDoneRequest>() { public IsProcedureDoneRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new IsProcedureDoneRequest(input, extensionRegistry); } }; @java.lang.Override public 
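// Unlike ExecProcedureRequest, IsProcedureDoneRequest marks `procedure` as
// optional: isInitialized() below validates the nested ProcedureDescription
// only when one is present, so an empty request still parses successfully.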
com.google.protobuf.Parser<IsProcedureDoneRequest> getParserForType() { return PARSER; } private int bitField0_; // optional .ProcedureDescription procedure = 1; public static final int PROCEDURE_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription procedure_; /** * <code>optional .ProcedureDescription procedure = 1;</code> */ public boolean hasProcedure() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional .ProcedureDescription procedure = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription getProcedure() { return procedure_; } /** * <code>optional .ProcedureDescription procedure = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder getProcedureOrBuilder() { return procedure_; } private void initFields() { procedure_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (hasProcedure()) { if (!getProcedure().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, procedure_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, procedure_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest) obj; boolean result = true; result = result && (hasProcedure() == other.hasProcedure()); if (hasProcedure()) { result = result && getProcedure() .equals(other.getProcedure()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasProcedure()) { hash = (37 * hash) + PROCEDURE_FIELD_NUMBER; hash = (53 * hash) + getProcedure().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest parseFrom( 
com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code IsProcedureDoneRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsProcedureDoneRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsProcedureDoneRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest.class, 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getProcedureFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (procedureBuilder_ == null) { procedure_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance(); } else { procedureBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsProcedureDoneRequest_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (procedureBuilder_ == null) { result.procedure_ = procedure_; } else { result.procedure_ = procedureBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest.getDefaultInstance()) return this; if (other.hasProcedure()) { mergeProcedure(other.getProcedure()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (hasProcedure()) { if (!getProcedure().isInitialized()) { return false; } } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = 
(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // optional .ProcedureDescription procedure = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription procedure_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder> procedureBuilder_; /** * <code>optional .ProcedureDescription procedure = 1;</code> */ public boolean hasProcedure() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional .ProcedureDescription procedure = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription getProcedure() { if (procedureBuilder_ == null) { return procedure_; } else { return procedureBuilder_.getMessage(); } } /** * <code>optional .ProcedureDescription procedure = 1;</code> */ public Builder setProcedure(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription value) { if (procedureBuilder_ == null) { if (value == null) { throw new NullPointerException(); } procedure_ = value; onChanged(); } else { procedureBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>optional .ProcedureDescription procedure = 1;</code> */ public Builder setProcedure( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder builderForValue) { if (procedureBuilder_ == null) { procedure_ = builderForValue.build(); onChanged(); } else { procedureBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>optional .ProcedureDescription procedure = 1;</code> */ public Builder mergeProcedure(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription value) { if (procedureBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && procedure_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance()) { procedure_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.newBuilder(procedure_).mergeFrom(value).buildPartial(); } else { procedure_ = value; } onChanged(); } else { procedureBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>optional .ProcedureDescription procedure = 1;</code> */ public Builder clearProcedure() { if (procedureBuilder_ == null) { procedure_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance(); onChanged(); } else { procedureBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>optional .ProcedureDescription procedure = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder getProcedureBuilder() { bitField0_ |= 0x00000001; onChanged(); return getProcedureFieldBuilder().getBuilder(); } /** * <code>optional .ProcedureDescription procedure = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder getProcedureOrBuilder() { if (procedureBuilder_ != null) { return procedureBuilder_.getMessageOrBuilder(); } else { 
return procedure_; } } /** * <code>optional .ProcedureDescription procedure = 1;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder> getProcedureFieldBuilder() { if (procedureBuilder_ == null) { procedureBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder>( procedure_, getParentForChildren(), isClean()); procedure_ = null; } return procedureBuilder_; } // @@protoc_insertion_point(builder_scope:IsProcedureDoneRequest) } static { defaultInstance = new IsProcedureDoneRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:IsProcedureDoneRequest) } public interface IsProcedureDoneResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { // optional bool done = 1 [default = false]; /** * <code>optional bool done = 1 [default = false];</code> */ boolean hasDone(); /** * <code>optional bool done = 1 [default = false];</code> */ boolean getDone(); // optional .ProcedureDescription snapshot = 2; /** * <code>optional .ProcedureDescription snapshot = 2;</code> */ boolean hasSnapshot(); /** * <code>optional .ProcedureDescription snapshot = 2;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription getSnapshot(); /** * <code>optional .ProcedureDescription snapshot = 2;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder getSnapshotOrBuilder(); } /** * Protobuf type {@code IsProcedureDoneResponse} */ public static final class IsProcedureDoneResponse extends com.google.protobuf.GeneratedMessage implements IsProcedureDoneResponseOrBuilder { // Use IsProcedureDoneResponse.newBuilder() to construct. 
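// A minimal construction sketch (illustrative only, not part of the generated
// API; it uses only the builder methods this class defines below):
//
//   IsProcedureDoneResponse resp =
//       IsProcedureDoneResponse.newBuilder().setDone(true).build();
//   boolean finished = resp.hasDone() && resp.getDone();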
private IsProcedureDoneResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private IsProcedureDoneResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final IsProcedureDoneResponse defaultInstance; public static IsProcedureDoneResponse getDefaultInstance() { return defaultInstance; } public IsProcedureDoneResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private IsProcedureDoneResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { bitField0_ |= 0x00000001; done_ = input.readBool(); break; } case 18: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder subBuilder = null; if (((bitField0_ & 0x00000002) == 0x00000002)) { subBuilder = snapshot_.toBuilder(); } snapshot_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(snapshot_); snapshot_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsProcedureDoneResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsProcedureDoneResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse.Builder.class); } public static com.google.protobuf.Parser<IsProcedureDoneResponse> PARSER = new com.google.protobuf.AbstractParser<IsProcedureDoneResponse>() { public IsProcedureDoneResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new IsProcedureDoneResponse(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<IsProcedureDoneResponse> getParserForType() { return PARSER; } private int bitField0_; // optional bool done = 1 [default = false]; public static final int DONE_FIELD_NUMBER = 1; private boolean done_; /** * <code>optional bool done = 1 [default = false];</code> */ public boolean 
hasDone() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional bool done = 1 [default = false];</code> */ public boolean getDone() { return done_; } // optional .ProcedureDescription snapshot = 2; public static final int SNAPSHOT_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription snapshot_; /** * <code>optional .ProcedureDescription snapshot = 2;</code> */ public boolean hasSnapshot() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional .ProcedureDescription snapshot = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription getSnapshot() { return snapshot_; } /** * <code>optional .ProcedureDescription snapshot = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder getSnapshotOrBuilder() { return snapshot_; } private void initFields() { done_ = false; snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (hasSnapshot()) { if (!getSnapshot().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, done_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeMessage(2, snapshot_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(1, done_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, snapshot_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse) obj; boolean result = true; result = result && (hasDone() == other.hasDone()); if (hasDone()) { result = result && (getDone() == other.getDone()); } result = result && (hasSnapshot() == other.hasSnapshot()); if (hasSnapshot()) { result = result && getSnapshot() .equals(other.getSnapshot()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasDone()) { hash = (37 * hash) + DONE_FIELD_NUMBER; hash = (53 * hash) + hashBoolean(getDone()); } if (hasSnapshot()) { hash = (37 * hash) + SNAPSHOT_FIELD_NUMBER; hash = 
(53 * hash) + getSnapshot().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code IsProcedureDoneResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsProcedureDoneResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsProcedureDoneResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getSnapshotFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); done_ = false; bitField0_ = (bitField0_ & ~0x00000001); if (snapshotBuilder_ == null) { snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance(); } else { snapshotBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsProcedureDoneResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.done_ = done_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } if (snapshotBuilder_ == null) { result.snapshot_ = snapshot_; } else { result.snapshot_ = snapshotBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse.getDefaultInstance()) return this; if (other.hasDone()) { setDone(other.getDone()); } if (other.hasSnapshot()) { 
mergeSnapshot(other.getSnapshot()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (hasSnapshot()) { if (!getSnapshot().isInitialized()) { return false; } } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // optional bool done = 1 [default = false]; private boolean done_ ; /** * <code>optional bool done = 1 [default = false];</code> */ public boolean hasDone() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional bool done = 1 [default = false];</code> */ public boolean getDone() { return done_; } /** * <code>optional bool done = 1 [default = false];</code> */ public Builder setDone(boolean value) { bitField0_ |= 0x00000001; done_ = value; onChanged(); return this; } /** * <code>optional bool done = 1 [default = false];</code> */ public Builder clearDone() { bitField0_ = (bitField0_ & ~0x00000001); done_ = false; onChanged(); return this; } // optional .ProcedureDescription snapshot = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder> snapshotBuilder_; /** * <code>optional .ProcedureDescription snapshot = 2;</code> */ public boolean hasSnapshot() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional .ProcedureDescription snapshot = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription getSnapshot() { if (snapshotBuilder_ == null) { return snapshot_; } else { return snapshotBuilder_.getMessage(); } } /** * <code>optional .ProcedureDescription snapshot = 2;</code> */ public Builder setSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription value) { if (snapshotBuilder_ == null) { if (value == null) { throw new NullPointerException(); } snapshot_ = value; onChanged(); } else { snapshotBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .ProcedureDescription snapshot = 2;</code> */ public Builder setSnapshot( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder builderForValue) { if (snapshotBuilder_ == null) { snapshot_ = builderForValue.build(); onChanged(); } else { snapshotBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .ProcedureDescription snapshot = 2;</code> */ public Builder mergeSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription value) { if (snapshotBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) 
&& snapshot_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance()) { snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial(); } else { snapshot_ = value; } onChanged(); } else { snapshotBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .ProcedureDescription snapshot = 2;</code> */ public Builder clearSnapshot() { if (snapshotBuilder_ == null) { snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance(); onChanged(); } else { snapshotBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } /** * <code>optional .ProcedureDescription snapshot = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder getSnapshotBuilder() { bitField0_ |= 0x00000002; onChanged(); return getSnapshotFieldBuilder().getBuilder(); } /** * <code>optional .ProcedureDescription snapshot = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder getSnapshotOrBuilder() { if (snapshotBuilder_ != null) { return snapshotBuilder_.getMessageOrBuilder(); } else { return snapshot_; } } /** * <code>optional .ProcedureDescription snapshot = 2;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder> getSnapshotFieldBuilder() { if (snapshotBuilder_ == null) { snapshotBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder>( snapshot_, getParentForChildren(), isClean()); snapshot_ = null; } return snapshotBuilder_; } // @@protoc_insertion_point(builder_scope:IsProcedureDoneResponse) } static { defaultInstance = new IsProcedureDoneResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:IsProcedureDoneResponse) } /** * Protobuf service {@code MasterService} */ public static abstract class MasterService implements com.google.protobuf.Service { protected MasterService() {} public interface Interface { /** * <code>rpc GetSchemaAlterStatus(.GetSchemaAlterStatusRequest) returns (.GetSchemaAlterStatusResponse);</code> * * <pre> ** Used by the client to get the number of regions that have received the updated schema * </pre> */ public abstract void getSchemaAlterStatus( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse> done); /** * <code>rpc GetTableDescriptors(.GetTableDescriptorsRequest) returns (.GetTableDescriptorsResponse);</code> * * <pre> ** Get list of TableDescriptors for requested tables. 
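 *
 * A hedged usage sketch, not part of the generated code itself: it assumes the
 * standard protobuf 2.x blocking stub generated for this service and an
 * already-connected com.google.protobuf.BlockingRpcChannel named "channel":
 *
 *   MasterService.BlockingInterface master = MasterService.newBlockingStub(channel);
 *   GetTableDescriptorsResponse resp =
 *       master.getTableDescriptors(null, GetTableDescriptorsRequest.getDefaultInstance());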
 * </pre>
 */
public abstract void getTableDescriptors(
    com.google.protobuf.RpcController controller,
    org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest request,
    com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse> done);

/**
 * <code>rpc GetTableNames(.GetTableNamesRequest) returns (.GetTableNamesResponse);</code>
 *
 * <pre>
 ** Get the list of table names.
 * </pre>
 */
public abstract void getTableNames(
    com.google.protobuf.RpcController controller,
    org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest request,
    com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse> done);

/**
 * <code>rpc GetClusterStatus(.GetClusterStatusRequest) returns (.GetClusterStatusResponse);</code>
 *
 * <pre>
 ** Return cluster status.
 * </pre>
 */
public abstract void getClusterStatus(
    com.google.protobuf.RpcController controller,
    org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest request,
    com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse> done);

/**
 * <code>rpc IsMasterRunning(.IsMasterRunningRequest) returns (.IsMasterRunningResponse);</code>
 *
 * <pre>
 ** Return true if the master is available.
 * </pre>
 */
public abstract void isMasterRunning(
    com.google.protobuf.RpcController controller,
    org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request,
    com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse> done);

/**
 * <code>rpc AddColumn(.AddColumnRequest) returns (.AddColumnResponse);</code>
 *
 * <pre>
 ** Adds a column to the specified table.
 * </pre>
 */
public abstract void addColumn(
    com.google.protobuf.RpcController controller,
    org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest request,
    com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse> done);

/**
 * <code>rpc DeleteColumn(.DeleteColumnRequest) returns (.DeleteColumnResponse);</code>
 *
 * <pre>
 ** Deletes a column from the specified table. The table must be disabled.
 * </pre>
 */
public abstract void deleteColumn(
    com.google.protobuf.RpcController controller,
    org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest request,
    com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse> done);

/**
 * <code>rpc ModifyColumn(.ModifyColumnRequest) returns (.ModifyColumnResponse);</code>
 *
 * <pre>
 ** Modifies an existing column on the specified table.
 * </pre>
 */
public abstract void modifyColumn(
    com.google.protobuf.RpcController controller,
    org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest request,
    com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse> done);

/**
 * <code>rpc MoveRegion(.MoveRegionRequest) returns (.MoveRegionResponse);</code>
 *
 * <pre>
 ** Move the given region to the destination server.
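 *
 * Hedged sketch (the builder field names below are assumptions taken from the
 * .proto definition; "region" and "destServer" are assumed, pre-built
 * HBaseProtos messages, and "master" an assumed blocking stub):
 *
 *   master.moveRegion(null, MoveRegionRequest.newBuilder()
 *       .setRegion(region).setDestServerName(destServer).build());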
 * </pre>
 */
public abstract void moveRegion(
    com.google.protobuf.RpcController controller,
    org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest request,
    com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse> done);

/**
 * <code>rpc DispatchMergingRegions(.DispatchMergingRegionsRequest) returns (.DispatchMergingRegionsResponse);</code>
 *
 * <pre>
 ** Master dispatches a merge of the given regions.
 * </pre>
 */
public abstract void dispatchMergingRegions(
    com.google.protobuf.RpcController controller,
    org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest request,
    com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse> done);

/**
 * <code>rpc AssignRegion(.AssignRegionRequest) returns (.AssignRegionResponse);</code>
 *
 * <pre>
 ** Assign a region to a server chosen at random.
 * </pre>
 */
public abstract void assignRegion(
    com.google.protobuf.RpcController controller,
    org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest request,
    com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse> done);

/**
 * <code>rpc UnassignRegion(.UnassignRegionRequest) returns (.UnassignRegionResponse);</code>
 *
 * <pre>
 **
 * Unassign a region from the current hosting regionserver. The region will
 * then be assigned to a regionserver chosen at random. The region could be
 * reassigned back to the same server. Use MoveRegion if you want to control
 * the region movement.
 * </pre>
 */
public abstract void unassignRegion(
    com.google.protobuf.RpcController controller,
    org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest request,
    com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse> done);

/**
 * <code>rpc OfflineRegion(.OfflineRegionRequest) returns (.OfflineRegionResponse);</code>
 *
 * <pre>
 **
 * Offline a region from the assignment manager's in-memory state. The
 * region should be in a closed state and there will be no attempt to
 * automatically reassign the region as in unassign. This is a special
 * method, and should only be used by experts or hbck.
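 *
 * Hedged sketch; "regionSpecifier" is an assumed, pre-built
 * HBaseProtos.RegionSpecifier and "master" an assumed blocking stub:
 *
 *   master.offlineRegion(null, OfflineRegionRequest.newBuilder()
 *       .setRegion(regionSpecifier).build());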
 * </pre>
 */
public abstract void offlineRegion(
    com.google.protobuf.RpcController controller,
    org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest request,
    com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse> done);

/**
 * <code>rpc DeleteTable(.DeleteTableRequest) returns (.DeleteTableResponse);</code>
 *
 * <pre>
 ** Deletes a table.
 * </pre>
 */
public abstract void deleteTable(
    com.google.protobuf.RpcController controller,
    org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest request,
    com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse> done);

/**
 * <code>rpc truncateTable(.TruncateTableRequest) returns (.TruncateTableResponse);</code>
 *
 * <pre>
 ** Truncate a table.
 * </pre>
 */
public abstract void truncateTable(
    com.google.protobuf.RpcController controller,
    org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest request,
    com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse> done);

/**
 * <code>rpc EnableTable(.EnableTableRequest) returns (.EnableTableResponse);</code>
 *
 * <pre>
 ** Puts the table online (only needed if the table has previously been taken offline).
 * </pre>
 */
public abstract void enableTable(
    com.google.protobuf.RpcController controller,
    org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest request,
    com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse> done);

/**
 * <code>rpc DisableTable(.DisableTableRequest) returns (.DisableTableResponse);</code>
 *
 * <pre>
 ** Take the table offline.
 * </pre>
 */
public abstract void disableTable(
    com.google.protobuf.RpcController controller,
    org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest request,
    com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse> done);

/**
 * <code>rpc ModifyTable(.ModifyTableRequest) returns (.ModifyTableResponse);</code>
 *
 * <pre>
 ** Modify a table's metadata.
 * </pre>
 */
public abstract void modifyTable(
    com.google.protobuf.RpcController controller,
    org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest request,
    com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse> done);

/**
 * <code>rpc CreateTable(.CreateTableRequest) returns (.CreateTableResponse);</code>
 *
 * <pre>
 ** Creates a new table asynchronously.
 * </pre>
 */
public abstract void createTable(
    com.google.protobuf.RpcController controller,
    org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest request,
    com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse> done);

/**
 * <code>rpc Shutdown(.ShutdownRequest) returns (.ShutdownResponse);</code>
 *
 * <pre>
 ** Shut down an HBase cluster.
 * </pre>
 */
public abstract void shutdown(
    com.google.protobuf.RpcController controller,
    org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest request,
    com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse> done);

/**
 * <code>rpc StopMaster(.StopMasterRequest) returns (.StopMasterResponse);</code>
 *
 * <pre>
 ** Stop the HBase Master only. Does not shut down the cluster.
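 *
 * Hedged sketch; StopMasterRequest carries no fields in the .proto definition,
 * so its default instance suffices ("master" is an assumed blocking stub):
 *
 *   master.stopMaster(null, StopMasterRequest.getDefaultInstance());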
 * </pre>
 */
public abstract void stopMaster(
    com.google.protobuf.RpcController controller,
    org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest request,
    com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse> done);

/**
 * <code>rpc Balance(.BalanceRequest) returns (.BalanceResponse);</code>
 *
 * <pre>
 **
 * Run the balancer. If there are regions to move, it will go ahead and do
 * the reassignments. It can NOT run for various reasons; check the logs.
 * </pre>
 */
public abstract void balance(
    com.google.protobuf.RpcController controller,
    org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest request,
    com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse> done);

/**
 * <code>rpc SetBalancerRunning(.SetBalancerRunningRequest) returns (.SetBalancerRunningResponse);</code>
 *
 * <pre>
 **
 * Turn the load balancer on or off.
 * If synchronous is true, it waits for the current balance() call, if one is
 * outstanding, to return.
 * </pre>
 */
public abstract void setBalancerRunning(
    com.google.protobuf.RpcController controller,
    org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest request,
    com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse> done);

/**
 * <code>rpc RunCatalogScan(.RunCatalogScanRequest) returns (.RunCatalogScanResponse);</code>
 *
 * <pre>
 ** Trigger a run of the catalog janitor.
 * </pre>
 */
public abstract void runCatalogScan(
    com.google.protobuf.RpcController controller,
    org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest request,
    com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse> done);

/**
 * <code>rpc EnableCatalogJanitor(.EnableCatalogJanitorRequest) returns (.EnableCatalogJanitorResponse);</code>
 *
 * <pre>
 **
 * Turn the catalog janitor on or off.
 * </pre>
 */
public abstract void enableCatalogJanitor(
    com.google.protobuf.RpcController controller,
    org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest request,
    com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse> done);

/**
 * <code>rpc IsCatalogJanitorEnabled(.IsCatalogJanitorEnabledRequest) returns (.IsCatalogJanitorEnabledResponse);</code>
 *
 * <pre>
 **
 * Query whether the catalog janitor is enabled.
 * </pre>
 */
public abstract void isCatalogJanitorEnabled(
    com.google.protobuf.RpcController controller,
    org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest request,
    com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse> done);

/**
 * <code>rpc ExecMasterService(.CoprocessorServiceRequest) returns (.CoprocessorServiceResponse);</code>
 *
 * <pre>
 **
 * Call a master coprocessor endpoint.
 * </pre>
 */
public abstract void execMasterService(
    com.google.protobuf.RpcController controller,
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
    com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done);

/**
 * <code>rpc Snapshot(.SnapshotRequest) returns (.SnapshotResponse);</code>
 *
 * <pre>
 **
 * Create a snapshot for the given table.
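 *
 * Hedged sketch; "snapshotDesc" is an assumed, pre-built
 * HBaseProtos.SnapshotDescription and "master" an assumed blocking stub:
 *
 *   SnapshotResponse resp = master.snapshot(null,
 *       SnapshotRequest.newBuilder().setSnapshot(snapshotDesc).build());
 *   long expectedTimeoutMs = resp.getExpectedTimeout();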
* </pre> */ public abstract void snapshot( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse> done); /** * <code>rpc GetCompletedSnapshots(.GetCompletedSnapshotsRequest) returns (.GetCompletedSnapshotsResponse);</code> * * <pre> ** * Get completed snapshots. * Returns a list of snapshot descriptors for completed snapshots * </pre> */ public abstract void getCompletedSnapshots( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse> done); /** * <code>rpc DeleteSnapshot(.DeleteSnapshotRequest) returns (.DeleteSnapshotResponse);</code> * * <pre> ** * Delete an existing snapshot. This method can also be used to clean up an aborted snapshot. * </pre> */ public abstract void deleteSnapshot( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse> done); /** * <code>rpc IsSnapshotDone(.IsSnapshotDoneRequest) returns (.IsSnapshotDoneResponse);</code> * * <pre> ** * Determine if the snapshot is done yet. * </pre> */ public abstract void isSnapshotDone( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse> done); /** * <code>rpc RestoreSnapshot(.RestoreSnapshotRequest) returns (.RestoreSnapshotResponse);</code> * * <pre> ** * Restore a snapshot * </pre> */ public abstract void restoreSnapshot( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse> done); /** * <code>rpc IsRestoreSnapshotDone(.IsRestoreSnapshotDoneRequest) returns (.IsRestoreSnapshotDoneResponse);</code> * * <pre> ** * Determine if the snapshot restore is done yet. * </pre> */ public abstract void isRestoreSnapshotDone( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse> done); /** * <code>rpc ExecProcedure(.ExecProcedureRequest) returns (.ExecProcedureResponse);</code> * * <pre> ** * Execute a distributed procedure. * </pre> */ public abstract void execProcedure( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse> done); /** * <code>rpc ExecProcedureWithRet(.ExecProcedureRequest) returns (.ExecProcedureResponse);</code> * * <pre> ** * Execute a distributed procedure with return data. 
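 *
 * Hedged submit-then-poll sketch built from the messages in this file;
 * "procDesc" is an assumed, pre-built HBaseProtos.ProcedureDescription and
 * "master" an assumed blocking stub:
 *
 *   ExecProcedureResponse submitted = master.execProcedureWithRet(null,
 *       ExecProcedureRequest.newBuilder().setProcedure(procDesc).build());
 *   com.google.protobuf.ByteString ret = submitted.getReturnData();
 *   IsProcedureDoneResponse status = master.isProcedureDone(null,
 *       IsProcedureDoneRequest.newBuilder().setProcedure(procDesc).build());
 *   boolean finished = status.getDone();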
 * </pre>
 */
public abstract void execProcedureWithRet(
    com.google.protobuf.RpcController controller,
    org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest request,
    com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse> done);

/**
 * <code>rpc IsProcedureDone(.IsProcedureDoneRequest) returns (.IsProcedureDoneResponse);</code>
 *
 * <pre>
 **
 * Determine if the procedure is done yet.
 * </pre>
 */
public abstract void isProcedureDone(
    com.google.protobuf.RpcController controller,
    org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest request,
    com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse> done);

/**
 * <code>rpc ModifyNamespace(.ModifyNamespaceRequest) returns (.ModifyNamespaceResponse);</code>
 *
 * <pre>
 ** Modify a namespace's metadata.
 * </pre>
 */
public abstract void modifyNamespace(
    com.google.protobuf.RpcController controller,
    org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest request,
    com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse> done);

/**
 * <code>rpc CreateNamespace(.CreateNamespaceRequest) returns (.CreateNamespaceResponse);</code>
 *
 * <pre>
 ** Creates a new namespace synchronously.
 * </pre>
 */
public abstract void createNamespace(
    com.google.protobuf.RpcController controller,
    org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest request,
    com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse> done);

/**
 * <code>rpc DeleteNamespace(.DeleteNamespaceRequest) returns (.DeleteNamespaceResponse);</code>
 *
 * <pre>
 ** Deletes a namespace synchronously.
 * </pre>
 */
public abstract void deleteNamespace(
    com.google.protobuf.RpcController controller,
    org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest request,
    com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse> done);

/**
 * <code>rpc GetNamespaceDescriptor(.GetNamespaceDescriptorRequest) returns (.GetNamespaceDescriptorResponse);</code>
 *
 * <pre>
 ** Get a namespace descriptor by name.
 * </pre>
 */
public abstract void getNamespaceDescriptor(
    com.google.protobuf.RpcController controller,
    org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest request,
    com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse> done);

/**
 * <code>rpc ListNamespaceDescriptors(.ListNamespaceDescriptorsRequest) returns (.ListNamespaceDescriptorsResponse);</code>
 *
 * <pre>
 ** Returns a list of namespace descriptors.
 * </pre>
 */
public abstract void listNamespaceDescriptors(
    com.google.protobuf.RpcController controller,
    org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest request,
    com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse> done);

/**
 * <code>rpc ListTableDescriptorsByNamespace(.ListTableDescriptorsByNamespaceRequest) returns (.ListTableDescriptorsByNamespaceResponse);</code>
 *
 * <pre>
 ** Returns a list of table descriptors for a given namespace.
 * </pre>
 */
public abstract void listTableDescriptorsByNamespace(
    com.google.protobuf.RpcController controller,
    org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest request,
    com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse> done);

/**
 * <code>rpc ListTableNamesByNamespace(.ListTableNamesByNamespaceRequest) returns (.ListTableNamesByNamespaceResponse);</code>
 *
 * <pre>
 ** Returns a list of table names for a given namespace.
 * </pre>
 */
public abstract void listTableNamesByNamespace(
    com.google.protobuf.RpcController controller,
    org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest request,
    com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse> done);
}

public static com.google.protobuf.Service newReflectiveService(
    final Interface impl) {
  return new MasterService() {
    @java.lang.Override
    public void getSchemaAlterStatus(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse> done) {
      impl.getSchemaAlterStatus(controller, request, done);
    }

    @java.lang.Override
    public void getTableDescriptors(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse> done) {
      impl.getTableDescriptors(controller, request, done);
    }

    @java.lang.Override
    public void getTableNames(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse> done) {
      impl.getTableNames(controller, request, done);
    }

    @java.lang.Override
    public void getClusterStatus(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse> done) {
      impl.getClusterStatus(controller, request, done);
    }

    @java.lang.Override
    public void isMasterRunning(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse> done) {
      impl.isMasterRunning(controller, request, done);
    }

    @java.lang.Override
    public void addColumn(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse> done) {
      impl.addColumn(controller, request, done);
    }

    @java.lang.Override
    public void deleteColumn(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse> done) {
      impl.deleteColumn(controller, request, done);
    }

    @java.lang.Override
    public void modifyColumn(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse> done) {
impl.modifyColumn(controller, request, done); } @java.lang.Override public void moveRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse> done) { impl.moveRegion(controller, request, done); } @java.lang.Override public void dispatchMergingRegions( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse> done) { impl.dispatchMergingRegions(controller, request, done); } @java.lang.Override public void assignRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse> done) { impl.assignRegion(controller, request, done); } @java.lang.Override public void unassignRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse> done) { impl.unassignRegion(controller, request, done); } @java.lang.Override public void offlineRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse> done) { impl.offlineRegion(controller, request, done); } @java.lang.Override public void deleteTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse> done) { impl.deleteTable(controller, request, done); } @java.lang.Override public void truncateTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse> done) { impl.truncateTable(controller, request, done); } @java.lang.Override public void enableTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse> done) { impl.enableTable(controller, request, done); } @java.lang.Override public void disableTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse> done) { impl.disableTable(controller, request, done); } @java.lang.Override public void modifyTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse> done) { impl.modifyTable(controller, request, done); } @java.lang.Override public void 
createTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse> done) { impl.createTable(controller, request, done); } @java.lang.Override public void shutdown( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse> done) { impl.shutdown(controller, request, done); } @java.lang.Override public void stopMaster( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse> done) { impl.stopMaster(controller, request, done); } @java.lang.Override public void balance( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse> done) { impl.balance(controller, request, done); } @java.lang.Override public void setBalancerRunning( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse> done) { impl.setBalancerRunning(controller, request, done); } @java.lang.Override public void runCatalogScan( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse> done) { impl.runCatalogScan(controller, request, done); } @java.lang.Override public void enableCatalogJanitor( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse> done) { impl.enableCatalogJanitor(controller, request, done); } @java.lang.Override public void isCatalogJanitorEnabled( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse> done) { impl.isCatalogJanitorEnabled(controller, request, done); } @java.lang.Override public void execMasterService( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done) { impl.execMasterService(controller, request, done); } @java.lang.Override public void snapshot( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse> done) { impl.snapshot(controller, request, done); } @java.lang.Override public void getCompletedSnapshots( com.google.protobuf.RpcController 
controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse> done) { impl.getCompletedSnapshots(controller, request, done); } @java.lang.Override public void deleteSnapshot( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse> done) { impl.deleteSnapshot(controller, request, done); } @java.lang.Override public void isSnapshotDone( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse> done) { impl.isSnapshotDone(controller, request, done); } @java.lang.Override public void restoreSnapshot( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse> done) { impl.restoreSnapshot(controller, request, done); } @java.lang.Override public void isRestoreSnapshotDone( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse> done) { impl.isRestoreSnapshotDone(controller, request, done); } @java.lang.Override public void execProcedure( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse> done) { impl.execProcedure(controller, request, done); } @java.lang.Override public void execProcedureWithRet( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse> done) { impl.execProcedureWithRet(controller, request, done); } @java.lang.Override public void isProcedureDone( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse> done) { impl.isProcedureDone(controller, request, done); } @java.lang.Override public void modifyNamespace( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse> done) { impl.modifyNamespace(controller, request, done); } @java.lang.Override public void createNamespace( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse> done) { impl.createNamespace(controller, request, done); } @java.lang.Override public void deleteNamespace( 
com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse> done) { impl.deleteNamespace(controller, request, done); } @java.lang.Override public void getNamespaceDescriptor( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse> done) { impl.getNamespaceDescriptor(controller, request, done); } @java.lang.Override public void listNamespaceDescriptors( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse> done) { impl.listNamespaceDescriptors(controller, request, done); } @java.lang.Override public void listTableDescriptorsByNamespace( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse> done) { impl.listTableDescriptorsByNamespace(controller, request, done); } @java.lang.Override public void listTableNamesByNamespace( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse> done) { impl.listTableNamesByNamespace(controller, request, done); } }; } public static com.google.protobuf.BlockingService newReflectiveBlockingService(final BlockingInterface impl) { return new com.google.protobuf.BlockingService() { public final com.google.protobuf.Descriptors.ServiceDescriptor getDescriptorForType() { return getDescriptor(); } public final com.google.protobuf.Message callBlockingMethod( com.google.protobuf.Descriptors.MethodDescriptor method, com.google.protobuf.RpcController controller, com.google.protobuf.Message request) throws com.google.protobuf.ServiceException { if (method.getService() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.callBlockingMethod() given method descriptor for " + "wrong service type."); } switch(method.getIndex()) { case 0: return impl.getSchemaAlterStatus(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest)request); case 1: return impl.getTableDescriptors(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest)request); case 2: return impl.getTableNames(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest)request); case 3: return impl.getClusterStatus(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest)request); case 4: return impl.isMasterRunning(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest)request); case 5: return impl.addColumn(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest)request); case 6: return impl.deleteColumn(controller, 
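// Usage sketch (illustrative; not part of the generated file). newReflectiveService above
// wraps a hand-written MasterService.Interface implementation in a generic
// com.google.protobuf.Service, so an RPC server can dispatch by MethodDescriptor alone.
// "MyMasterImpl", "controller" and "done" below are hypothetical placeholders:
//
//   MasterService.Interface impl = new MyMasterImpl();   // implements all 43 rpcs
//   com.google.protobuf.Service service = MasterService.newReflectiveService(impl);
//   com.google.protobuf.Descriptors.MethodDescriptor md =
//       service.getDescriptorForType().findMethodByName("IsMasterRunning");
//   // The prototype gives the transport a typed instance of the request message:
//   com.google.protobuf.Message req = service.getRequestPrototype(md);
//   service.callMethod(md, controller, req, done);  // routes to impl.isMasterRunning(...)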
(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest)request); case 7: return impl.modifyColumn(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest)request); case 8: return impl.moveRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest)request); case 9: return impl.dispatchMergingRegions(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest)request); case 10: return impl.assignRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest)request); case 11: return impl.unassignRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest)request); case 12: return impl.offlineRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest)request); case 13: return impl.deleteTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest)request); case 14: return impl.truncateTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest)request); case 15: return impl.enableTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest)request); case 16: return impl.disableTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest)request); case 17: return impl.modifyTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest)request); case 18: return impl.createTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest)request); case 19: return impl.shutdown(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest)request); case 20: return impl.stopMaster(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest)request); case 21: return impl.balance(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest)request); case 22: return impl.setBalancerRunning(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest)request); case 23: return impl.runCatalogScan(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest)request); case 24: return impl.enableCatalogJanitor(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest)request); case 25: return impl.isCatalogJanitorEnabled(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest)request); case 26: return impl.execMasterService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request); case 27: return impl.snapshot(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest)request); case 28: return impl.getCompletedSnapshots(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest)request); case 29: return impl.deleteSnapshot(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest)request); case 30: return impl.isSnapshotDone(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest)request); case 31: return impl.restoreSnapshot(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest)request); case 32: return impl.isRestoreSnapshotDone(controller, 
(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest)request); case 33: return impl.execProcedure(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest)request); case 34: return impl.execProcedureWithRet(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest)request); case 35: return impl.isProcedureDone(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest)request); case 36: return impl.modifyNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest)request); case 37: return impl.createNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest)request); case 38: return impl.deleteNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest)request); case 39: return impl.getNamespaceDescriptor(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest)request); case 40: return impl.listNamespaceDescriptors(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest)request); case 41: return impl.listTableDescriptorsByNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest)request); case 42: return impl.listTableNamesByNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest)request); default: throw new java.lang.AssertionError("Can't get here."); } } public final com.google.protobuf.Message getRequestPrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { if (method.getService() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.getRequestPrototype() given method " + "descriptor for wrong service type."); } switch(method.getIndex()) { case 0: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest.getDefaultInstance(); case 1: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest.getDefaultInstance(); case 2: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest.getDefaultInstance(); case 3: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest.getDefaultInstance(); case 4: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.getDefaultInstance(); case 5: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest.getDefaultInstance(); case 6: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest.getDefaultInstance(); case 7: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest.getDefaultInstance(); case 8: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest.getDefaultInstance(); case 9: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest.getDefaultInstance(); case 10: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest.getDefaultInstance(); case 11: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest.getDefaultInstance(); case 12: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest.getDefaultInstance(); case 13: return 
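// Sketch of the blocking path (illustrative; "myBlockingImpl" and "controller" are
// hypothetical). callBlockingMethod above dispatches on method.getIndex(), i.e. the
// rpc's declaration order in Master.proto (0 = GetSchemaAlterStatus,
// 42 = ListTableNamesByNamespace), casts the request to the concrete type, and
// invokes the BlockingInterface implementation synchronously; it throws
// com.google.protobuf.ServiceException on failure:
//
//   com.google.protobuf.BlockingService bs =
//       MasterService.newReflectiveBlockingService(myBlockingImpl);
//   com.google.protobuf.Descriptors.MethodDescriptor md =
//       bs.getDescriptorForType().findMethodByName("GetClusterStatus");
//   org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse status =
//       (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse)
//           bs.callBlockingMethod(md, controller,
//               org.apache.hadoop.hbase.protobuf.generated.MasterProtos
//                   .GetClusterStatusRequest.getDefaultInstance());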
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest.getDefaultInstance(); case 14: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest.getDefaultInstance(); case 15: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest.getDefaultInstance(); case 16: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest.getDefaultInstance(); case 17: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest.getDefaultInstance(); case 18: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest.getDefaultInstance(); case 19: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest.getDefaultInstance(); case 20: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest.getDefaultInstance(); case 21: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest.getDefaultInstance(); case 22: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest.getDefaultInstance(); case 23: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest.getDefaultInstance(); case 24: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest.getDefaultInstance(); case 25: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest.getDefaultInstance(); case 26: return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance(); case 27: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest.getDefaultInstance(); case 28: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.getDefaultInstance(); case 29: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest.getDefaultInstance(); case 30: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest.getDefaultInstance(); case 31: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest.getDefaultInstance(); case 32: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest.getDefaultInstance(); case 33: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest.getDefaultInstance(); case 34: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest.getDefaultInstance(); case 35: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest.getDefaultInstance(); case 36: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest.getDefaultInstance(); case 37: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest.getDefaultInstance(); case 38: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest.getDefaultInstance(); case 39: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest.getDefaultInstance(); case 40: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest.getDefaultInstance(); case 41: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest.getDefaultInstance(); case 42: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest.getDefaultInstance(); default: throw new 
java.lang.AssertionError("Can't get here."); } } public final com.google.protobuf.Message getResponsePrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { if (method.getService() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.getResponsePrototype() given method " + "descriptor for wrong service type."); } switch(method.getIndex()) { case 0: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse.getDefaultInstance(); case 1: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse.getDefaultInstance(); case 2: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse.getDefaultInstance(); case 3: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse.getDefaultInstance(); case 4: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDefaultInstance(); case 5: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse.getDefaultInstance(); case 6: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse.getDefaultInstance(); case 7: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse.getDefaultInstance(); case 8: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse.getDefaultInstance(); case 9: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse.getDefaultInstance(); case 10: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse.getDefaultInstance(); case 11: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse.getDefaultInstance(); case 12: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse.getDefaultInstance(); case 13: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse.getDefaultInstance(); case 14: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse.getDefaultInstance(); case 15: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse.getDefaultInstance(); case 16: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse.getDefaultInstance(); case 17: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse.getDefaultInstance(); case 18: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse.getDefaultInstance(); case 19: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse.getDefaultInstance(); case 20: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse.getDefaultInstance(); case 21: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse.getDefaultInstance(); case 22: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse.getDefaultInstance(); case 23: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse.getDefaultInstance(); case 24: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse.getDefaultInstance(); case 25: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance(); case 26: return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(); case 27: return 
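// Why prototypes (illustrative sketch; "bs", "md" and "requestBytes" are hypothetical
// placeholders): getRequestPrototype and getResponsePrototype let transport code
// parse and emit messages for any of the 43 rpcs without compile-time knowledge of
// the concrete types. A server deserializing a framed payload can do, for example:
//
//   com.google.protobuf.Message prototype = bs.getRequestPrototype(md);
//   com.google.protobuf.Message request =
//       prototype.newBuilderForType().mergeFrom(requestBytes).build();
//   // "request" now has the concrete type the switch above returns for md,
//   // e.g. CreateTableRequest for method index 18.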
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse.getDefaultInstance(); case 28: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.getDefaultInstance(); case 29: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse.getDefaultInstance(); case 30: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.getDefaultInstance(); case 31: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse.getDefaultInstance(); case 32: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse.getDefaultInstance(); case 33: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance(); case 34: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance(); case 35: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse.getDefaultInstance(); case 36: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse.getDefaultInstance(); case 37: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse.getDefaultInstance(); case 38: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse.getDefaultInstance(); case 39: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.getDefaultInstance(); case 40: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.getDefaultInstance(); case 41: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.getDefaultInstance(); case 42: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } } }; } /** * <code>rpc GetSchemaAlterStatus(.GetSchemaAlterStatusRequest) returns (.GetSchemaAlterStatusResponse);</code> * * <pre> ** Used by the client to get the number of regions that have received the updated schema * </pre> */ public abstract void getSchemaAlterStatus( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse> done); /** * <code>rpc GetTableDescriptors(.GetTableDescriptorsRequest) returns (.GetTableDescriptorsResponse);</code> * * <pre> ** Get list of TableDescriptors for requested tables. * </pre> */ public abstract void getTableDescriptors( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse> done); /** * <code>rpc GetTableNames(.GetTableNamesRequest) returns (.GetTableNamesResponse);</code> * * <pre> ** Get the list of table names. 
* </pre> */ public abstract void getTableNames( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse> done); /** * <code>rpc GetClusterStatus(.GetClusterStatusRequest) returns (.GetClusterStatusResponse);</code> * * <pre> ** Return cluster status. * </pre> */ public abstract void getClusterStatus( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse> done); /** * <code>rpc IsMasterRunning(.IsMasterRunningRequest) returns (.IsMasterRunningResponse);</code> * * <pre> ** Return true if the master is available. * </pre> */ public abstract void isMasterRunning( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse> done); /** * <code>rpc AddColumn(.AddColumnRequest) returns (.AddColumnResponse);</code> * * <pre> ** Adds a column to the specified table. * </pre> */ public abstract void addColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse> done); /** * <code>rpc DeleteColumn(.DeleteColumnRequest) returns (.DeleteColumnResponse);</code> * * <pre> ** Deletes a column from the specified table. Table must be disabled. * </pre> */ public abstract void deleteColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse> done); /** * <code>rpc ModifyColumn(.ModifyColumnRequest) returns (.ModifyColumnResponse);</code> * * <pre> ** Modifies an existing column on the specified table. * </pre> */ public abstract void modifyColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse> done); /** * <code>rpc MoveRegion(.MoveRegionRequest) returns (.MoveRegionResponse);</code> * * <pre> ** Move the region to the destination server. 
* </pre> */ public abstract void moveRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse> done); /** * <code>rpc DispatchMergingRegions(.DispatchMergingRegionsRequest) returns (.DispatchMergingRegionsResponse);</code> * * <pre> ** Ask the master to dispatch a merge of the given regions * </pre> */ public abstract void dispatchMergingRegions( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse> done); /** * <code>rpc AssignRegion(.AssignRegionRequest) returns (.AssignRegionResponse);</code> * * <pre> ** Assign a region to a server chosen at random. * </pre> */ public abstract void assignRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse> done); /** * <code>rpc UnassignRegion(.UnassignRegionRequest) returns (.UnassignRegionResponse);</code> * * <pre> ** * Unassign a region from its current hosting regionserver. The region will then be * assigned to a regionserver chosen at random, and could be reassigned * back to the same server. Use MoveRegion if you want * to control the region movement. * </pre> */ public abstract void unassignRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse> done); /** * <code>rpc OfflineRegion(.OfflineRegionRequest) returns (.OfflineRegionResponse);</code> * * <pre> ** * Offline a region from the assignment manager's in-memory state. The * region should be in a closed state and there will be no attempt to * automatically reassign the region as in unassign. This is a special * method, and should only be used by experts or hbck. 
* </pre> */ public abstract void offlineRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse> done); /** * <code>rpc DeleteTable(.DeleteTableRequest) returns (.DeleteTableResponse);</code> * * <pre> ** Deletes a table * </pre> */ public abstract void deleteTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse> done); /** * <code>rpc truncateTable(.TruncateTableRequest) returns (.TruncateTableResponse);</code> * * <pre> ** Truncate a table * </pre> */ public abstract void truncateTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse> done); /** * <code>rpc EnableTable(.EnableTableRequest) returns (.EnableTableResponse);</code> * * <pre> ** Puts the table on-line (only needed if table has been previously taken offline) * </pre> */ public abstract void enableTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse> done); /** * <code>rpc DisableTable(.DisableTableRequest) returns (.DisableTableResponse);</code> * * <pre> ** Take table offline * </pre> */ public abstract void disableTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse> done); /** * <code>rpc ModifyTable(.ModifyTableRequest) returns (.ModifyTableResponse);</code> * * <pre> ** Modify a table's metadata * </pre> */ public abstract void modifyTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse> done); /** * <code>rpc CreateTable(.CreateTableRequest) returns (.CreateTableResponse);</code> * * <pre> ** Creates a new table asynchronously * </pre> */ public abstract void createTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse> done); /** * <code>rpc Shutdown(.ShutdownRequest) returns (.ShutdownResponse);</code> * * <pre> ** Shutdown an HBase cluster. * </pre> */ public abstract void shutdown( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse> done); /** * <code>rpc StopMaster(.StopMasterRequest) returns (.StopMasterResponse);</code> * * <pre> ** Stop HBase Master only. Does not shutdown the cluster. 
* </pre> */ public abstract void stopMaster( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse> done); /** * <code>rpc Balance(.BalanceRequest) returns (.BalanceResponse);</code> * * <pre> ** * Run the balancer. If there are regions to move, it will * go ahead and do the reassignments. It can refuse to run for various reasons; * check the logs. * </pre> */ public abstract void balance( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse> done); /** * <code>rpc SetBalancerRunning(.SetBalancerRunningRequest) returns (.SetBalancerRunningResponse);</code> * * <pre> ** * Turn the load balancer on or off. * If synchronous is true, it waits for the current balance() call, if one is outstanding, to return. * </pre> */ public abstract void setBalancerRunning( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse> done); /** * <code>rpc RunCatalogScan(.RunCatalogScanRequest) returns (.RunCatalogScanResponse);</code> * * <pre> ** Trigger a run of the catalog janitor * </pre> */ public abstract void runCatalogScan( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse> done); /** * <code>rpc EnableCatalogJanitor(.EnableCatalogJanitorRequest) returns (.EnableCatalogJanitorResponse);</code> * * <pre> ** * Turn the catalog janitor on or off. * </pre> */ public abstract void enableCatalogJanitor( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse> done); /** * <code>rpc IsCatalogJanitorEnabled(.IsCatalogJanitorEnabledRequest) returns (.IsCatalogJanitorEnabledResponse);</code> * * <pre> ** * Query whether the catalog janitor is enabled. * </pre> */ public abstract void isCatalogJanitorEnabled( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse> done); /** * <code>rpc ExecMasterService(.CoprocessorServiceRequest) returns (.CoprocessorServiceResponse);</code> * * <pre> ** * Call a master coprocessor endpoint * </pre> */ public abstract void execMasterService( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done); /** * <code>rpc Snapshot(.SnapshotRequest) returns (.SnapshotResponse);</code> * * <pre> ** * Create a snapshot for the given table. 
* </pre> */ public abstract void snapshot( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse> done); /** * <code>rpc GetCompletedSnapshots(.GetCompletedSnapshotsRequest) returns (.GetCompletedSnapshotsResponse);</code> * * <pre> ** * Get completed snapshots. * Returns a list of snapshot descriptors for completed snapshots * </pre> */ public abstract void getCompletedSnapshots( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse> done); /** * <code>rpc DeleteSnapshot(.DeleteSnapshotRequest) returns (.DeleteSnapshotResponse);</code> * * <pre> ** * Delete an existing snapshot. This method can also be used to clean up an aborted snapshot. * </pre> */ public abstract void deleteSnapshot( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse> done); /** * <code>rpc IsSnapshotDone(.IsSnapshotDoneRequest) returns (.IsSnapshotDoneResponse);</code> * * <pre> ** * Determine if the snapshot is done yet. * </pre> */ public abstract void isSnapshotDone( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse> done); /** * <code>rpc RestoreSnapshot(.RestoreSnapshotRequest) returns (.RestoreSnapshotResponse);</code> * * <pre> ** * Restore a snapshot * </pre> */ public abstract void restoreSnapshot( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse> done); /** * <code>rpc IsRestoreSnapshotDone(.IsRestoreSnapshotDoneRequest) returns (.IsRestoreSnapshotDoneResponse);</code> * * <pre> ** * Determine if the snapshot restore is done yet. * </pre> */ public abstract void isRestoreSnapshotDone( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse> done); /** * <code>rpc ExecProcedure(.ExecProcedureRequest) returns (.ExecProcedureResponse);</code> * * <pre> ** * Execute a distributed procedure. * </pre> */ public abstract void execProcedure( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse> done); /** * <code>rpc ExecProcedureWithRet(.ExecProcedureRequest) returns (.ExecProcedureResponse);</code> * * <pre> ** * Execute a distributed procedure with return data. 
* </pre> */ public abstract void execProcedureWithRet( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse> done); /** * <code>rpc IsProcedureDone(.IsProcedureDoneRequest) returns (.IsProcedureDoneResponse);</code> * * <pre> ** * Determine if the procedure is done yet. * </pre> */ public abstract void isProcedureDone( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse> done); /** * <code>rpc ModifyNamespace(.ModifyNamespaceRequest) returns (.ModifyNamespaceResponse);</code> * * <pre> ** Modify a namespace's metadata * </pre> */ public abstract void modifyNamespace( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse> done); /** * <code>rpc CreateNamespace(.CreateNamespaceRequest) returns (.CreateNamespaceResponse);</code> * * <pre> ** Creates a new namespace synchronously * </pre> */ public abstract void createNamespace( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse> done); /** * <code>rpc DeleteNamespace(.DeleteNamespaceRequest) returns (.DeleteNamespaceResponse);</code> * * <pre> ** Deletes a namespace synchronously * </pre> */ public abstract void deleteNamespace( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse> done); /** * <code>rpc GetNamespaceDescriptor(.GetNamespaceDescriptorRequest) returns (.GetNamespaceDescriptorResponse);</code> * * <pre> ** Get a namespace descriptor by name * </pre> */ public abstract void getNamespaceDescriptor( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse> done); /** * <code>rpc ListNamespaceDescriptors(.ListNamespaceDescriptorsRequest) returns (.ListNamespaceDescriptorsResponse);</code> * * <pre> ** returns a list of namespaces * </pre> */ public abstract void listNamespaceDescriptors( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse> done); /** * <code>rpc ListTableDescriptorsByNamespace(.ListTableDescriptorsByNamespaceRequest) returns (.ListTableDescriptorsByNamespaceResponse);</code> * * <pre> ** returns a list of tables for a given namespace * </pre> */ public abstract void listTableDescriptorsByNamespace( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest request, 
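// Implementation sketch (illustrative; "MyMaster" is a hypothetical subclass).
// Each abstract rpc above follows the same async contract: do the work, then
// complete the call by running the callback exactly once with the response.
// One common convention on failure is controller.setFailed(...) followed by
// running the callback with null:
//
//   public class MyMaster extends MasterService {
//     @java.lang.Override
//     public void isMasterRunning(
//         com.google.protobuf.RpcController controller,
//         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request,
//         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse> done) {
//       done.run(org.apache.hadoop.hbase.protobuf.generated.MasterProtos
//           .IsMasterRunningResponse.newBuilder().setIsMasterRunning(true).build());
//     }
//     // ...the remaining rpcs must be implemented the same way
//   }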
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse> done); /** * <code>rpc ListTableNamesByNamespace(.ListTableNamesByNamespaceRequest) returns (.ListTableNamesByNamespaceResponse);</code> * * <pre> ** returns a list of tables for a given namespace * </pre> */ public abstract void listTableNamesByNamespace( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse> done); public static final com.google.protobuf.Descriptors.ServiceDescriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.getDescriptor().getServices().get(0); } public final com.google.protobuf.Descriptors.ServiceDescriptor getDescriptorForType() { return getDescriptor(); } public final void callMethod( com.google.protobuf.Descriptors.MethodDescriptor method, com.google.protobuf.RpcController controller, com.google.protobuf.Message request, com.google.protobuf.RpcCallback< com.google.protobuf.Message> done) { if (method.getService() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.callMethod() given method descriptor for wrong " + "service type."); } switch(method.getIndex()) { case 0: this.getSchemaAlterStatus(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse>specializeCallback( done)); return; case 1: this.getTableDescriptors(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse>specializeCallback( done)); return; case 2: this.getTableNames(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse>specializeCallback( done)); return; case 3: this.getClusterStatus(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse>specializeCallback( done)); return; case 4: this.isMasterRunning(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse>specializeCallback( done)); return; case 5: this.addColumn(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse>specializeCallback( done)); return; case 6: this.deleteColumn(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse>specializeCallback( done)); return; case 7: this.modifyColumn(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest)request, 
com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse>specializeCallback( done)); return; case 8: this.moveRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse>specializeCallback( done)); return; case 9: this.dispatchMergingRegions(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse>specializeCallback( done)); return; case 10: this.assignRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse>specializeCallback( done)); return; case 11: this.unassignRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse>specializeCallback( done)); return; case 12: this.offlineRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse>specializeCallback( done)); return; case 13: this.deleteTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse>specializeCallback( done)); return; case 14: this.truncateTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse>specializeCallback( done)); return; case 15: this.enableTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse>specializeCallback( done)); return; case 16: this.disableTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse>specializeCallback( done)); return; case 17: this.modifyTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse>specializeCallback( done)); return; case 18: this.createTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse>specializeCallback( done)); return; case 19: this.shutdown(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse>specializeCallback( done)); return; case 20: this.stopMaster(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest)request, 
com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse>specializeCallback( done)); return; case 21: this.balance(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse>specializeCallback( done)); return; case 22: this.setBalancerRunning(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse>specializeCallback( done)); return; case 23: this.runCatalogScan(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse>specializeCallback( done)); return; case 24: this.enableCatalogJanitor(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse>specializeCallback( done)); return; case 25: this.isCatalogJanitorEnabled(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse>specializeCallback( done)); return; case 26: this.execMasterService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse>specializeCallback( done)); return; case 27: this.snapshot(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse>specializeCallback( done)); return; case 28: this.getCompletedSnapshots(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse>specializeCallback( done)); return; case 29: this.deleteSnapshot(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse>specializeCallback( done)); return; case 30: this.isSnapshotDone(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse>specializeCallback( done)); return; case 31: this.restoreSnapshot(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse>specializeCallback( done)); return; case 32: this.isRestoreSnapshotDone(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse>specializeCallback( done)); return; case 33: this.execProcedure(controller, 
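        // callMethod dispatches on method.getIndex(), which follows the
        // declaration order of the rpcs in the MasterService definition in
        // Master.proto; each case downcasts the request Message to the rpc's
        // concrete request type and narrows the generic callback with
        // RpcUtil.specializeCallback before invoking the abstract method.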
(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse>specializeCallback( done)); return; case 34: this.execProcedureWithRet(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse>specializeCallback( done)); return; case 35: this.isProcedureDone(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse>specializeCallback( done)); return; case 36: this.modifyNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse>specializeCallback( done)); return; case 37: this.createNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse>specializeCallback( done)); return; case 38: this.deleteNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse>specializeCallback( done)); return; case 39: this.getNamespaceDescriptor(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse>specializeCallback( done)); return; case 40: this.listNamespaceDescriptors(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse>specializeCallback( done)); return; case 41: this.listTableDescriptorsByNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse>specializeCallback( done)); return; case 42: this.listTableNamesByNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse>specializeCallback( done)); return; default: throw new java.lang.AssertionError("Can't get here."); } } public final com.google.protobuf.Message getRequestPrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { if (method.getService() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.getRequestPrototype() given method " + "descriptor for wrong service type."); } switch(method.getIndex()) { case 0: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest.getDefaultInstance(); case 1: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest.getDefaultInstance(); case 2: return 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest.getDefaultInstance(); case 3: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest.getDefaultInstance(); case 4: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.getDefaultInstance(); case 5: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest.getDefaultInstance(); case 6: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest.getDefaultInstance(); case 7: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest.getDefaultInstance(); case 8: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest.getDefaultInstance(); case 9: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest.getDefaultInstance(); case 10: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest.getDefaultInstance(); case 11: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest.getDefaultInstance(); case 12: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest.getDefaultInstance(); case 13: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest.getDefaultInstance(); case 14: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest.getDefaultInstance(); case 15: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest.getDefaultInstance(); case 16: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest.getDefaultInstance(); case 17: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest.getDefaultInstance(); case 18: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest.getDefaultInstance(); case 19: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest.getDefaultInstance(); case 20: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest.getDefaultInstance(); case 21: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest.getDefaultInstance(); case 22: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest.getDefaultInstance(); case 23: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest.getDefaultInstance(); case 24: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest.getDefaultInstance(); case 25: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest.getDefaultInstance(); case 26: return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance(); case 27: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest.getDefaultInstance(); case 28: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.getDefaultInstance(); case 29: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest.getDefaultInstance(); case 30: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest.getDefaultInstance(); case 31: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest.getDefaultInstance(); case 32: return 
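        // Each case returns the default instance of the rpc's request type;
        // the RPC layer uses it as a prototype when parsing incoming requests.
        // Cases 33 and 34 both return ExecProcedureRequest because
        // execProcedure and execProcedureWithRet share the same message types.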
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest.getDefaultInstance(); case 33: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest.getDefaultInstance(); case 34: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest.getDefaultInstance(); case 35: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest.getDefaultInstance(); case 36: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest.getDefaultInstance(); case 37: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest.getDefaultInstance(); case 38: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest.getDefaultInstance(); case 39: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest.getDefaultInstance(); case 40: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest.getDefaultInstance(); case 41: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest.getDefaultInstance(); case 42: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } } public final com.google.protobuf.Message getResponsePrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { if (method.getService() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.getResponsePrototype() given method " + "descriptor for wrong service type."); } switch(method.getIndex()) { case 0: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse.getDefaultInstance(); case 1: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse.getDefaultInstance(); case 2: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse.getDefaultInstance(); case 3: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse.getDefaultInstance(); case 4: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDefaultInstance(); case 5: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse.getDefaultInstance(); case 6: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse.getDefaultInstance(); case 7: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse.getDefaultInstance(); case 8: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse.getDefaultInstance(); case 9: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse.getDefaultInstance(); case 10: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse.getDefaultInstance(); case 11: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse.getDefaultInstance(); case 12: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse.getDefaultInstance(); case 13: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse.getDefaultInstance(); case 14: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse.getDefaultInstance(); case 15: return 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse.getDefaultInstance(); case 16: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse.getDefaultInstance(); case 17: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse.getDefaultInstance(); case 18: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse.getDefaultInstance(); case 19: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse.getDefaultInstance(); case 20: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse.getDefaultInstance(); case 21: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse.getDefaultInstance(); case 22: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse.getDefaultInstance(); case 23: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse.getDefaultInstance(); case 24: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse.getDefaultInstance(); case 25: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance(); case 26: return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(); case 27: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse.getDefaultInstance(); case 28: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.getDefaultInstance(); case 29: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse.getDefaultInstance(); case 30: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.getDefaultInstance(); case 31: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse.getDefaultInstance(); case 32: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse.getDefaultInstance(); case 33: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance(); case 34: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance(); case 35: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse.getDefaultInstance(); case 36: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse.getDefaultInstance(); case 37: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse.getDefaultInstance(); case 38: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse.getDefaultInstance(); case 39: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.getDefaultInstance(); case 40: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.getDefaultInstance(); case 41: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.getDefaultInstance(); case 42: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } } public static Stub newStub( com.google.protobuf.RpcChannel channel) { return new Stub(channel); } public static final class Stub extends 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterService implements Interface { private Stub(com.google.protobuf.RpcChannel channel) { this.channel = channel; } private final com.google.protobuf.RpcChannel channel; public com.google.protobuf.RpcChannel getChannel() { return channel; } public void getSchemaAlterStatus( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse> done) { channel.callMethod( getDescriptor().getMethods().get(0), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse.getDefaultInstance())); } public void getTableDescriptors( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse> done) { channel.callMethod( getDescriptor().getMethods().get(1), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse.getDefaultInstance())); } public void getTableNames( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse> done) { channel.callMethod( getDescriptor().getMethods().get(2), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse.getDefaultInstance())); } public void getClusterStatus( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse> done) { channel.callMethod( getDescriptor().getMethods().get(3), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse.getDefaultInstance())); } public void isMasterRunning( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse> done) { channel.callMethod( getDescriptor().getMethods().get(4), controller, request, 
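          // Every Stub method follows the same pattern: forward to
          // channel.callMethod(...) with this rpc's MethodDescriptor, pass the
          // response default instance as the parse prototype, and adapt the
          // typed `done` callback back to a Message callback with
          // RpcUtil.generalizeCallback.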
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDefaultInstance())); } public void addColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse> done) { channel.callMethod( getDescriptor().getMethods().get(5), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse.getDefaultInstance())); } public void deleteColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse> done) { channel.callMethod( getDescriptor().getMethods().get(6), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse.getDefaultInstance())); } public void modifyColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse> done) { channel.callMethod( getDescriptor().getMethods().get(7), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse.getDefaultInstance())); } public void moveRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse> done) { channel.callMethod( getDescriptor().getMethods().get(8), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse.getDefaultInstance())); } public void dispatchMergingRegions( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse> done) { channel.callMethod( getDescriptor().getMethods().get(9), controller, request, 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse.getDefaultInstance())); } public void assignRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse> done) { channel.callMethod( getDescriptor().getMethods().get(10), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse.getDefaultInstance())); } public void unassignRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse> done) { channel.callMethod( getDescriptor().getMethods().get(11), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse.getDefaultInstance())); } public void offlineRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse> done) { channel.callMethod( getDescriptor().getMethods().get(12), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse.getDefaultInstance())); } public void deleteTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse> done) { channel.callMethod( getDescriptor().getMethods().get(13), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse.getDefaultInstance())); } public void truncateTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse> done) { channel.callMethod( getDescriptor().getMethods().get(14), controller, request, 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse.getDefaultInstance())); } public void enableTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse> done) { channel.callMethod( getDescriptor().getMethods().get(15), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse.getDefaultInstance())); } public void disableTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse> done) { channel.callMethod( getDescriptor().getMethods().get(16), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse.getDefaultInstance())); } public void modifyTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse> done) { channel.callMethod( getDescriptor().getMethods().get(17), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse.getDefaultInstance())); } public void createTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse> done) { channel.callMethod( getDescriptor().getMethods().get(18), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse.getDefaultInstance())); } public void shutdown( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse> done) { channel.callMethod( getDescriptor().getMethods().get(19), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse.getDefaultInstance(), 
com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse.getDefaultInstance())); } public void stopMaster( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse> done) { channel.callMethod( getDescriptor().getMethods().get(20), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse.getDefaultInstance())); } public void balance( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse> done) { channel.callMethod( getDescriptor().getMethods().get(21), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse.getDefaultInstance())); } public void setBalancerRunning( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse> done) { channel.callMethod( getDescriptor().getMethods().get(22), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse.getDefaultInstance())); } public void runCatalogScan( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse> done) { channel.callMethod( getDescriptor().getMethods().get(23), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse.getDefaultInstance())); } public void enableCatalogJanitor( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse> done) { channel.callMethod( getDescriptor().getMethods().get(24), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse.getDefaultInstance())); } public void isCatalogJanitorEnabled( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse> done) { channel.callMethod( getDescriptor().getMethods().get(25), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance())); } public void execMasterService( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done) { channel.callMethod( getDescriptor().getMethods().get(26), controller, request, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance())); } public void snapshot( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse> done) { channel.callMethod( getDescriptor().getMethods().get(27), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse.getDefaultInstance())); } public void getCompletedSnapshots( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse> done) { channel.callMethod( getDescriptor().getMethods().get(28), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.getDefaultInstance())); } public void deleteSnapshot( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse> done) { channel.callMethod( getDescriptor().getMethods().get(29), controller, request, 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse.getDefaultInstance())); } public void isSnapshotDone( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse> done) { channel.callMethod( getDescriptor().getMethods().get(30), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.getDefaultInstance())); } public void restoreSnapshot( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse> done) { channel.callMethod( getDescriptor().getMethods().get(31), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse.getDefaultInstance())); } public void isRestoreSnapshotDone( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse> done) { channel.callMethod( getDescriptor().getMethods().get(32), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse.getDefaultInstance())); } public void execProcedure( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse> done) { channel.callMethod( getDescriptor().getMethods().get(33), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance())); } public void execProcedureWithRet( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse> done) { channel.callMethod( getDescriptor().getMethods().get(34), 
controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance())); } public void isProcedureDone( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse> done) { channel.callMethod( getDescriptor().getMethods().get(35), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse.getDefaultInstance())); } public void modifyNamespace( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse> done) { channel.callMethod( getDescriptor().getMethods().get(36), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse.getDefaultInstance())); } public void createNamespace( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse> done) { channel.callMethod( getDescriptor().getMethods().get(37), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse.getDefaultInstance())); } public void deleteNamespace( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse> done) { channel.callMethod( getDescriptor().getMethods().get(38), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse.getDefaultInstance())); } public void getNamespaceDescriptor( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse> done) { channel.callMethod( 
getDescriptor().getMethods().get(39), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.getDefaultInstance())); } public void listNamespaceDescriptors( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse> done) { channel.callMethod( getDescriptor().getMethods().get(40), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.getDefaultInstance())); } public void listTableDescriptorsByNamespace( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse> done) { channel.callMethod( getDescriptor().getMethods().get(41), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.getDefaultInstance())); } public void listTableNamesByNamespace( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse> done) { channel.callMethod( getDescriptor().getMethods().get(42), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.getDefaultInstance())); } } public static BlockingInterface newBlockingStub( com.google.protobuf.BlockingRpcChannel channel) { return new BlockingStub(channel); } public interface BlockingInterface { public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse getSchemaAlterStatus( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse getTableDescriptors( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest request) throws com.google.protobuf.ServiceException; public 
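      // BlockingInterface mirrors the asynchronous Interface method-for-method,
      // but each call returns its response directly and reports transport
      // failures as com.google.protobuf.ServiceException rather than through a
      // callback.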
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse getTableNames( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse getClusterStatus( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse isMasterRunning( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse addColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse deleteColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse modifyColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse moveRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse dispatchMergingRegions( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse assignRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse unassignRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse offlineRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse deleteTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse truncateTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest 
request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse enableTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse disableTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse modifyTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse createTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse shutdown( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse stopMaster( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse balance( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse setBalancerRunning( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse runCatalogScan( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse enableCatalogJanitor( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse isCatalogJanitorEnabled( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execMasterService( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse snapshot( com.google.protobuf.RpcController controller, 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse getCompletedSnapshots( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse deleteSnapshot( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse isSnapshotDone( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse restoreSnapshot( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse isRestoreSnapshotDone( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse execProcedure( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse execProcedureWithRet( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse isProcedureDone( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse modifyNamespace( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse createNamespace( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse deleteNamespace( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse getNamespaceDescriptor( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest request) throws 
          com.google.protobuf.ServiceException;

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse listNamespaceDescriptors(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest request)
          throws com.google.protobuf.ServiceException;

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse listTableDescriptorsByNamespace(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest request)
          throws com.google.protobuf.ServiceException;

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse listTableNamesByNamespace(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest request)
          throws com.google.protobuf.ServiceException;
    }

    /*
     * BlockingStub is the concrete client behind BlockingInterface. It wraps a
     * BlockingRpcChannel and forwards every call to channel.callBlockingMethod(...)
     * with the MethodDescriptor at a fixed index. The indices 0..42 mirror the
     * order in which the rpcs are declared in MasterService in Master.proto, so
     * the dispatch table is only valid for the exact .proto this file was
     * generated from.
     */
    private static final class BlockingStub implements BlockingInterface {
      private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
        this.channel = channel;
      }

      private final com.google.protobuf.BlockingRpcChannel channel;

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse getSchemaAlterStatus(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse.getDefaultInstance());
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse getTableDescriptors(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse.getDefaultInstance());
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse getTableNames(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(2),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse.getDefaultInstance());
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse getClusterStatus(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(3),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse.getDefaultInstance());
      }

      public
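      /*
       * Example client usage (a sketch, not part of the code shown here: it
       * assumes the standard newBlockingStub(BlockingRpcChannel) factory that
       * protobuf 2.x emits on the enclosing MasterService class, plus an
       * application-supplied BlockingRpcChannel, e.g. one backed by HBase's
       * RPC client). The call lands in the isMasterRunning stub that follows
       * this comment:
       *
       *   com.google.protobuf.BlockingRpcChannel channel = ...; // supplied by the RPC layer
       *   MasterService.BlockingInterface master = MasterService.newBlockingStub(channel);
       *   IsMasterRunningResponse rsp =
       *       master.isMasterRunning(null, IsMasterRunningRequest.getDefaultInstance());
       *   boolean up = rsp.getIsMasterRunning();
       *
       * IsMasterRunningRequest has no fields, so getDefaultInstance() is a
       * valid, fully initialized request.
       */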
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse isMasterRunning( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(4), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse addColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(5), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse deleteColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(6), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse modifyColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(7), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse moveRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(8), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse dispatchMergingRegions( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(9), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse assignRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest 
request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(10), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse unassignRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(11), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse offlineRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(12), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse deleteTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(13), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse truncateTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(14), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse enableTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(15), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse disableTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(16), controller, request, 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse modifyTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(17), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse createTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(18), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse shutdown( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(19), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse stopMaster( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(20), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse balance( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(21), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse setBalancerRunning( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(22), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse runCatalogScan( com.google.protobuf.RpcController controller, 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(23), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse enableCatalogJanitor( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(24), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse isCatalogJanitorEnabled( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(25), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execMasterService( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(26), controller, request, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse snapshot( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(27), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse getCompletedSnapshots( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(28), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse deleteSnapshot( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest request) throws 
com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(29), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse isSnapshotDone( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(30), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse restoreSnapshot( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(31), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse isRestoreSnapshotDone( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(32), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse execProcedure( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(33), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse execProcedureWithRet( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(34), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse isProcedureDone( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse) 
channel.callBlockingMethod( getDescriptor().getMethods().get(35), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse modifyNamespace( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(36), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse createNamespace( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(37), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse deleteNamespace( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(38), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse getNamespaceDescriptor( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(39), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse listNamespaceDescriptors( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(40), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse listTableDescriptorsByNamespace( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse) 
channel.callBlockingMethod( getDescriptor().getMethods().get(41), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse listTableNamesByNamespace( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(42), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.getDefaultInstance()); } } // @@protoc_insertion_point(class_scope:MasterService) } private static com.google.protobuf.Descriptors.Descriptor internal_static_AddColumnRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_AddColumnRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_AddColumnResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_AddColumnResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_DeleteColumnRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_DeleteColumnRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_DeleteColumnResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_DeleteColumnResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_ModifyColumnRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_ModifyColumnRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_ModifyColumnResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_ModifyColumnResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_MoveRegionRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_MoveRegionRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_MoveRegionResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_MoveRegionResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_DispatchMergingRegionsRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_DispatchMergingRegionsRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_DispatchMergingRegionsResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_DispatchMergingRegionsResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_AssignRegionRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_AssignRegionRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor 
internal_static_AssignRegionResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_AssignRegionResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_UnassignRegionRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_UnassignRegionRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_UnassignRegionResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_UnassignRegionResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_OfflineRegionRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_OfflineRegionRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_OfflineRegionResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_OfflineRegionResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_CreateTableRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_CreateTableRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_CreateTableResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_CreateTableResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_DeleteTableRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_DeleteTableRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_DeleteTableResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_DeleteTableResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_TruncateTableRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_TruncateTableRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_TruncateTableResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_TruncateTableResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_EnableTableRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_EnableTableRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_EnableTableResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_EnableTableResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_DisableTableRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_DisableTableRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_DisableTableResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_DisableTableResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor 
internal_static_ModifyTableRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_ModifyTableRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_ModifyTableResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_ModifyTableResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_CreateNamespaceRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_CreateNamespaceRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_CreateNamespaceResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_CreateNamespaceResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_DeleteNamespaceRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_DeleteNamespaceRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_DeleteNamespaceResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_DeleteNamespaceResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_ModifyNamespaceRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_ModifyNamespaceRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_ModifyNamespaceResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_ModifyNamespaceResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_GetNamespaceDescriptorRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_GetNamespaceDescriptorRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_GetNamespaceDescriptorResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_GetNamespaceDescriptorResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_ListNamespaceDescriptorsRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_ListNamespaceDescriptorsRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_ListNamespaceDescriptorsResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_ListNamespaceDescriptorsResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_ListTableDescriptorsByNamespaceRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_ListTableDescriptorsByNamespaceRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_ListTableDescriptorsByNamespaceResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_ListTableDescriptorsByNamespaceResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_ListTableNamesByNamespaceRequest_descriptor; private static 
com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_ListTableNamesByNamespaceRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_ListTableNamesByNamespaceResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_ListTableNamesByNamespaceResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_ShutdownRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_ShutdownRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_ShutdownResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_ShutdownResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_StopMasterRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_StopMasterRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_StopMasterResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_StopMasterResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_BalanceRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_BalanceRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_BalanceResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_BalanceResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_SetBalancerRunningRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_SetBalancerRunningRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_SetBalancerRunningResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_SetBalancerRunningResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_RunCatalogScanRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_RunCatalogScanRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_RunCatalogScanResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_RunCatalogScanResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_EnableCatalogJanitorRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_EnableCatalogJanitorRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_EnableCatalogJanitorResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_EnableCatalogJanitorResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_IsCatalogJanitorEnabledRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_IsCatalogJanitorEnabledRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor 
internal_static_IsCatalogJanitorEnabledResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_IsCatalogJanitorEnabledResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_SnapshotRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_SnapshotRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_SnapshotResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_SnapshotResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_GetCompletedSnapshotsRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_GetCompletedSnapshotsRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_GetCompletedSnapshotsResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_GetCompletedSnapshotsResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_DeleteSnapshotRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_DeleteSnapshotRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_DeleteSnapshotResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_DeleteSnapshotResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_RestoreSnapshotRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_RestoreSnapshotRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_RestoreSnapshotResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_RestoreSnapshotResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_IsSnapshotDoneRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_IsSnapshotDoneRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_IsSnapshotDoneResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_IsSnapshotDoneResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_IsRestoreSnapshotDoneRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_IsRestoreSnapshotDoneRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_IsRestoreSnapshotDoneResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_IsRestoreSnapshotDoneResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_GetSchemaAlterStatusRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_GetSchemaAlterStatusRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_GetSchemaAlterStatusResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable 
internal_static_GetSchemaAlterStatusResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_GetTableDescriptorsRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_GetTableDescriptorsRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_GetTableDescriptorsResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_GetTableDescriptorsResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_GetTableNamesRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_GetTableNamesRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_GetTableNamesResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_GetTableNamesResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_GetClusterStatusRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_GetClusterStatusRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_GetClusterStatusResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_GetClusterStatusResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_IsMasterRunningRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_IsMasterRunningRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_IsMasterRunningResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_IsMasterRunningResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_ExecProcedureRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_ExecProcedureRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_ExecProcedureResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_ExecProcedureResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_IsProcedureDoneRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_IsProcedureDoneRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_IsProcedureDoneResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_IsProcedureDoneResponse_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n\014Master.proto\032\013HBase.proto\032\014Client.prot" + "o\032\023ClusterStatus.proto\"`\n\020AddColumnReque" + "st\022\036\n\ntable_name\030\001 \002(\0132\n.TableName\022,\n\017co" + "lumn_families\030\002 \002(\0132\023.ColumnFamilySchema" + "\"\023\n\021AddColumnResponse\"J\n\023DeleteColumnReq" + "uest\022\036\n\ntable_name\030\001 \002(\0132\n.TableName\022\023\n\013" + 
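      /*
       * The escaped string pieces above and below are the serialized
       * FileDescriptorProto for Master.proto (note the imports of HBase.proto,
       * Client.proto and ClusterStatus.proto encoded at the very start).
       * protoc splits the blob into short literals to stay within Java's 64K
       * limit on a single string constant; at class-initialization time the
       * pieces are concatenated and parsed back into a FileDescriptor
       * (typically via FileDescriptor.internalBuildGeneratedFileFrom, using
       * the assigner defined after the array).
       */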
"column_name\030\002 \002(\014\"\026\n\024DeleteColumnRespons" + "e\"c\n\023ModifyColumnRequest\022\036\n\ntable_name\030\001" + " \002(\0132\n.TableName\022,\n\017column_families\030\002 \002(" + "\0132\023.ColumnFamilySchema\"\026\n\024ModifyColumnRe", "sponse\"\\\n\021MoveRegionRequest\022 \n\006region\030\001 " + "\002(\0132\020.RegionSpecifier\022%\n\020dest_server_nam" + "e\030\002 \001(\0132\013.ServerName\"\024\n\022MoveRegionRespon" + "se\"\200\001\n\035DispatchMergingRegionsRequest\022\"\n\010" + "region_a\030\001 \002(\0132\020.RegionSpecifier\022\"\n\010regi" + "on_b\030\002 \002(\0132\020.RegionSpecifier\022\027\n\010forcible" + "\030\003 \001(\010:\005false\" \n\036DispatchMergingRegionsR" + "esponse\"7\n\023AssignRegionRequest\022 \n\006region" + "\030\001 \002(\0132\020.RegionSpecifier\"\026\n\024AssignRegion" + "Response\"O\n\025UnassignRegionRequest\022 \n\006reg", "ion\030\001 \002(\0132\020.RegionSpecifier\022\024\n\005force\030\002 \001" + "(\010:\005false\"\030\n\026UnassignRegionResponse\"8\n\024O" + "fflineRegionRequest\022 \n\006region\030\001 \002(\0132\020.Re" + "gionSpecifier\"\027\n\025OfflineRegionResponse\"L" + "\n\022CreateTableRequest\022\"\n\014table_schema\030\001 \002" + "(\0132\014.TableSchema\022\022\n\nsplit_keys\030\002 \003(\014\"\025\n\023" + "CreateTableResponse\"4\n\022DeleteTableReques" + "t\022\036\n\ntable_name\030\001 \002(\0132\n.TableName\"\025\n\023Del" + "eteTableResponse\"T\n\024TruncateTableRequest" + "\022\035\n\ttableName\030\001 \002(\0132\n.TableName\022\035\n\016prese", "rveSplits\030\002 \001(\010:\005false\"\027\n\025TruncateTableR" + "esponse\"4\n\022EnableTableRequest\022\036\n\ntable_n" + "ame\030\001 \002(\0132\n.TableName\"\025\n\023EnableTableResp" + "onse\"5\n\023DisableTableRequest\022\036\n\ntable_nam" + "e\030\001 \002(\0132\n.TableName\"\026\n\024DisableTableRespo" + "nse\"X\n\022ModifyTableRequest\022\036\n\ntable_name\030" + "\001 \002(\0132\n.TableName\022\"\n\014table_schema\030\002 \002(\0132" + "\014.TableSchema\"\025\n\023ModifyTableResponse\"K\n\026" + "CreateNamespaceRequest\0221\n\023namespaceDescr" + "iptor\030\001 \002(\0132\024.NamespaceDescriptor\"\031\n\027Cre", "ateNamespaceResponse\"/\n\026DeleteNamespaceR" + "equest\022\025\n\rnamespaceName\030\001 \002(\t\"\031\n\027DeleteN" + "amespaceResponse\"K\n\026ModifyNamespaceReque" + "st\0221\n\023namespaceDescriptor\030\001 \002(\0132\024.Namesp" + "aceDescriptor\"\031\n\027ModifyNamespaceResponse" + "\"6\n\035GetNamespaceDescriptorRequest\022\025\n\rnam" + "espaceName\030\001 \002(\t\"S\n\036GetNamespaceDescript" + "orResponse\0221\n\023namespaceDescriptor\030\001 \002(\0132" + "\024.NamespaceDescriptor\"!\n\037ListNamespaceDe" + "scriptorsRequest\"U\n ListNamespaceDescrip", "torsResponse\0221\n\023namespaceDescriptor\030\001 \003(" + "\0132\024.NamespaceDescriptor\"?\n&ListTableDesc" + "riptorsByNamespaceRequest\022\025\n\rnamespaceNa" + "me\030\001 \002(\t\"L\n\'ListTableDescriptorsByNamesp" + "aceResponse\022!\n\013tableSchema\030\001 \003(\0132\014.Table" + "Schema\"9\n ListTableNamesByNamespaceReque" + "st\022\025\n\rnamespaceName\030\001 \002(\t\"B\n!ListTableNa" + "mesByNamespaceResponse\022\035\n\ttableName\030\001 \003(" + "\0132\n.TableName\"\021\n\017ShutdownRequest\"\022\n\020Shut" + "downResponse\"\023\n\021StopMasterRequest\"\024\n\022Sto", "pMasterResponse\"\020\n\016BalanceRequest\"\'\n\017Bal" + "anceResponse\022\024\n\014balancer_ran\030\001 \002(\010\"<\n\031Se" + "tBalancerRunningRequest\022\n\n\002on\030\001 \002(\010\022\023\n\013s" + 
"ynchronous\030\002 \001(\010\"8\n\032SetBalancerRunningRe" + "sponse\022\032\n\022prev_balance_value\030\001 \001(\010\"\027\n\025Ru" + "nCatalogScanRequest\"-\n\026RunCatalogScanRes" + "ponse\022\023\n\013scan_result\030\001 \001(\005\"-\n\033EnableCata" + "logJanitorRequest\022\016\n\006enable\030\001 \002(\010\"2\n\034Ena" + "bleCatalogJanitorResponse\022\022\n\nprev_value\030" + "\001 \001(\010\" \n\036IsCatalogJanitorEnabledRequest\"", "0\n\037IsCatalogJanitorEnabledResponse\022\r\n\005va" + "lue\030\001 \002(\010\"9\n\017SnapshotRequest\022&\n\010snapshot" + "\030\001 \002(\0132\024.SnapshotDescription\",\n\020Snapshot" + "Response\022\030\n\020expected_timeout\030\001 \002(\003\"\036\n\034Ge" + "tCompletedSnapshotsRequest\"H\n\035GetComplet" + "edSnapshotsResponse\022\'\n\tsnapshots\030\001 \003(\0132\024" + ".SnapshotDescription\"?\n\025DeleteSnapshotRe" + "quest\022&\n\010snapshot\030\001 \002(\0132\024.SnapshotDescri" + "ption\"\030\n\026DeleteSnapshotResponse\"@\n\026Resto" + "reSnapshotRequest\022&\n\010snapshot\030\001 \002(\0132\024.Sn", "apshotDescription\"\031\n\027RestoreSnapshotResp" + "onse\"?\n\025IsSnapshotDoneRequest\022&\n\010snapsho" + "t\030\001 \001(\0132\024.SnapshotDescription\"U\n\026IsSnaps" + "hotDoneResponse\022\023\n\004done\030\001 \001(\010:\005false\022&\n\010" + "snapshot\030\002 \001(\0132\024.SnapshotDescription\"F\n\034" + "IsRestoreSnapshotDoneRequest\022&\n\010snapshot" + "\030\001 \001(\0132\024.SnapshotDescription\"4\n\035IsRestor" + "eSnapshotDoneResponse\022\023\n\004done\030\001 \001(\010:\005fal" + "se\"=\n\033GetSchemaAlterStatusRequest\022\036\n\ntab" + "le_name\030\001 \002(\0132\n.TableName\"T\n\034GetSchemaAl", "terStatusResponse\022\035\n\025yet_to_update_regio" + "ns\030\001 \001(\r\022\025\n\rtotal_regions\030\002 \001(\r\"\202\001\n\032GetT" + "ableDescriptorsRequest\022\037\n\013table_names\030\001 " + "\003(\0132\n.TableName\022\r\n\005regex\030\002 \001(\t\022!\n\022includ" + "e_sys_tables\030\003 \001(\010:\005false\022\021\n\tnamespace\030\004" + " \001(\t\"A\n\033GetTableDescriptorsResponse\022\"\n\014t" + "able_schema\030\001 \003(\0132\014.TableSchema\"[\n\024GetTa" + "bleNamesRequest\022\r\n\005regex\030\001 \001(\t\022!\n\022includ" + "e_sys_tables\030\002 \001(\010:\005false\022\021\n\tnamespace\030\003" + " \001(\t\"8\n\025GetTableNamesResponse\022\037\n\013table_n", "ames\030\001 \003(\0132\n.TableName\"\031\n\027GetClusterStat" + "usRequest\"B\n\030GetClusterStatusResponse\022&\n" + "\016cluster_status\030\001 \002(\0132\016.ClusterStatus\"\030\n" + "\026IsMasterRunningRequest\"4\n\027IsMasterRunni" + "ngResponse\022\031\n\021is_master_running\030\001 \002(\010\"@\n" + "\024ExecProcedureRequest\022(\n\tprocedure\030\001 \002(\013" + "2\025.ProcedureDescription\"F\n\025ExecProcedure" + "Response\022\030\n\020expected_timeout\030\001 \001(\003\022\023\n\013re" + "turn_data\030\002 \001(\014\"B\n\026IsProcedureDoneReques" + "t\022(\n\tprocedure\030\001 \001(\0132\025.ProcedureDescript", "ion\"W\n\027IsProcedureDoneResponse\022\023\n\004done\030\001" + " \001(\010:\005false\022\'\n\010snapshot\030\002 \001(\0132\025.Procedur" + "eDescription2\365\027\n\rMasterService\022S\n\024GetSch" + "emaAlterStatus\022\034.GetSchemaAlterStatusReq" + "uest\032\035.GetSchemaAlterStatusResponse\022P\n\023G" + "etTableDescriptors\022\033.GetTableDescriptors" + "Request\032\034.GetTableDescriptorsResponse\022>\n" + "\rGetTableNames\022\025.GetTableNamesRequest\032\026." 
+ "GetTableNamesResponse\022G\n\020GetClusterStatu" + "s\022\030.GetClusterStatusRequest\032\031.GetCluster", "StatusResponse\022D\n\017IsMasterRunning\022\027.IsMa" + "sterRunningRequest\032\030.IsMasterRunningResp" + "onse\0222\n\tAddColumn\022\021.AddColumnRequest\032\022.A" + "ddColumnResponse\022;\n\014DeleteColumn\022\024.Delet" + "eColumnRequest\032\025.DeleteColumnResponse\022;\n" + "\014ModifyColumn\022\024.ModifyColumnRequest\032\025.Mo" + "difyColumnResponse\0225\n\nMoveRegion\022\022.MoveR" + "egionRequest\032\023.MoveRegionResponse\022Y\n\026Dis" + "patchMergingRegions\022\036.DispatchMergingReg" + "ionsRequest\032\037.DispatchMergingRegionsResp", "onse\022;\n\014AssignRegion\022\024.AssignRegionReque" + "st\032\025.AssignRegionResponse\022A\n\016UnassignReg" + "ion\022\026.UnassignRegionRequest\032\027.UnassignRe" + "gionResponse\022>\n\rOfflineRegion\022\025.OfflineR" + "egionRequest\032\026.OfflineRegionResponse\0228\n\013" + "DeleteTable\022\023.DeleteTableRequest\032\024.Delet" + "eTableResponse\022>\n\rtruncateTable\022\025.Trunca" + "teTableRequest\032\026.TruncateTableResponse\0228" + "\n\013EnableTable\022\023.EnableTableRequest\032\024.Ena" + "bleTableResponse\022;\n\014DisableTable\022\024.Disab", "leTableRequest\032\025.DisableTableResponse\0228\n" + "\013ModifyTable\022\023.ModifyTableRequest\032\024.Modi" + "fyTableResponse\0228\n\013CreateTable\022\023.CreateT" + "ableRequest\032\024.CreateTableResponse\022/\n\010Shu" + "tdown\022\020.ShutdownRequest\032\021.ShutdownRespon" + "se\0225\n\nStopMaster\022\022.StopMasterRequest\032\023.S" + "topMasterResponse\022,\n\007Balance\022\017.BalanceRe" + "quest\032\020.BalanceResponse\022M\n\022SetBalancerRu" + "nning\022\032.SetBalancerRunningRequest\032\033.SetB" + "alancerRunningResponse\022A\n\016RunCatalogScan", "\022\026.RunCatalogScanRequest\032\027.RunCatalogSca" + "nResponse\022S\n\024EnableCatalogJanitor\022\034.Enab" + "leCatalogJanitorRequest\032\035.EnableCatalogJ" + "anitorResponse\022\\\n\027IsCatalogJanitorEnable" + "d\022\037.IsCatalogJanitorEnabledRequest\032 .IsC" + "atalogJanitorEnabledResponse\022L\n\021ExecMast" + "erService\022\032.CoprocessorServiceRequest\032\033." 
+ "CoprocessorServiceResponse\022/\n\010Snapshot\022\020" + ".SnapshotRequest\032\021.SnapshotResponse\022V\n\025G" + "etCompletedSnapshots\022\035.GetCompletedSnaps", "hotsRequest\032\036.GetCompletedSnapshotsRespo" + "nse\022A\n\016DeleteSnapshot\022\026.DeleteSnapshotRe" + "quest\032\027.DeleteSnapshotResponse\022A\n\016IsSnap" + "shotDone\022\026.IsSnapshotDoneRequest\032\027.IsSna" + "pshotDoneResponse\022D\n\017RestoreSnapshot\022\027.R" + "estoreSnapshotRequest\032\030.RestoreSnapshotR" + "esponse\022V\n\025IsRestoreSnapshotDone\022\035.IsRes" + "toreSnapshotDoneRequest\032\036.IsRestoreSnaps" + "hotDoneResponse\022>\n\rExecProcedure\022\025.ExecP" + "rocedureRequest\032\026.ExecProcedureResponse\022", "E\n\024ExecProcedureWithRet\022\025.ExecProcedureR" + "equest\032\026.ExecProcedureResponse\022D\n\017IsProc" + "edureDone\022\027.IsProcedureDoneRequest\032\030.IsP" + "rocedureDoneResponse\022D\n\017ModifyNamespace\022" + "\027.ModifyNamespaceRequest\032\030.ModifyNamespa" + "ceResponse\022D\n\017CreateNamespace\022\027.CreateNa" + "mespaceRequest\032\030.CreateNamespaceResponse" + "\022D\n\017DeleteNamespace\022\027.DeleteNamespaceReq" + "uest\032\030.DeleteNamespaceResponse\022Y\n\026GetNam" + "espaceDescriptor\022\036.GetNamespaceDescripto", "rRequest\032\037.GetNamespaceDescriptorRespons" + "e\022_\n\030ListNamespaceDescriptors\022 .ListName" + "spaceDescriptorsRequest\032!.ListNamespaceD" + "escriptorsResponse\022t\n\037ListTableDescripto" + "rsByNamespace\022\'.ListTableDescriptorsByNa" + "mespaceRequest\032(.ListTableDescriptorsByN" + "amespaceResponse\022b\n\031ListTableNamesByName" + "space\022!.ListTableNamesByNamespaceRequest" + "\032\".ListTableNamesByNamespaceResponseBB\n*" + "org.apache.hadoop.hbase.protobuf.generat", "edB\014MasterProtosH\001\210\001\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { public com.google.protobuf.ExtensionRegistry assignDescriptors( com.google.protobuf.Descriptors.FileDescriptor root) { descriptor = root; internal_static_AddColumnRequest_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_AddColumnRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_AddColumnRequest_descriptor, new java.lang.String[] { "TableName", "ColumnFamilies", }); internal_static_AddColumnResponse_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_AddColumnResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_AddColumnResponse_descriptor, new java.lang.String[] { }); internal_static_DeleteColumnRequest_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_DeleteColumnRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_DeleteColumnRequest_descriptor, new java.lang.String[] { "TableName", "ColumnName", }); internal_static_DeleteColumnResponse_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_DeleteColumnResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_DeleteColumnResponse_descriptor, new java.lang.String[] { }); internal_static_ModifyColumnRequest_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_ModifyColumnRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( 
                    internal_static_ModifyColumnRequest_descriptor,
                    new java.lang.String[] { "TableName", "ColumnFamilies", });
            internal_static_ModifyColumnResponse_descriptor = getDescriptor().getMessageTypes().get(5);
            internal_static_ModifyColumnResponse_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_ModifyColumnResponse_descriptor, new java.lang.String[] { });
            internal_static_MoveRegionRequest_descriptor = getDescriptor().getMessageTypes().get(6);
            internal_static_MoveRegionRequest_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_MoveRegionRequest_descriptor, new java.lang.String[] { "Region", "DestServerName", });
            internal_static_MoveRegionResponse_descriptor = getDescriptor().getMessageTypes().get(7);
            internal_static_MoveRegionResponse_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_MoveRegionResponse_descriptor, new java.lang.String[] { });
            internal_static_DispatchMergingRegionsRequest_descriptor = getDescriptor().getMessageTypes().get(8);
            internal_static_DispatchMergingRegionsRequest_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_DispatchMergingRegionsRequest_descriptor, new java.lang.String[] { "RegionA", "RegionB", "Forcible", });
            internal_static_DispatchMergingRegionsResponse_descriptor = getDescriptor().getMessageTypes().get(9);
            internal_static_DispatchMergingRegionsResponse_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_DispatchMergingRegionsResponse_descriptor, new java.lang.String[] { });
            internal_static_AssignRegionRequest_descriptor = getDescriptor().getMessageTypes().get(10);
            internal_static_AssignRegionRequest_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_AssignRegionRequest_descriptor, new java.lang.String[] { "Region", });
            internal_static_AssignRegionResponse_descriptor = getDescriptor().getMessageTypes().get(11);
            internal_static_AssignRegionResponse_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_AssignRegionResponse_descriptor, new java.lang.String[] { });
            internal_static_UnassignRegionRequest_descriptor = getDescriptor().getMessageTypes().get(12);
            internal_static_UnassignRegionRequest_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_UnassignRegionRequest_descriptor, new java.lang.String[] { "Region", "Force", });
            internal_static_UnassignRegionResponse_descriptor = getDescriptor().getMessageTypes().get(13);
            internal_static_UnassignRegionResponse_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_UnassignRegionResponse_descriptor, new java.lang.String[] { });
            internal_static_OfflineRegionRequest_descriptor = getDescriptor().getMessageTypes().get(14);
            internal_static_OfflineRegionRequest_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_OfflineRegionRequest_descriptor, new java.lang.String[] { "Region", });
            internal_static_OfflineRegionResponse_descriptor = getDescriptor().getMessageTypes().get(15);
            internal_static_OfflineRegionResponse_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_OfflineRegionResponse_descriptor, new java.lang.String[] { });
            internal_static_CreateTableRequest_descriptor = getDescriptor().getMessageTypes().get(16);
            internal_static_CreateTableRequest_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_CreateTableRequest_descriptor, new java.lang.String[] { "TableSchema", "SplitKeys", });
            internal_static_CreateTableResponse_descriptor = getDescriptor().getMessageTypes().get(17);
            internal_static_CreateTableResponse_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_CreateTableResponse_descriptor, new java.lang.String[] { });
            internal_static_DeleteTableRequest_descriptor = getDescriptor().getMessageTypes().get(18);
            internal_static_DeleteTableRequest_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_DeleteTableRequest_descriptor, new java.lang.String[] { "TableName", });
            internal_static_DeleteTableResponse_descriptor = getDescriptor().getMessageTypes().get(19);
            internal_static_DeleteTableResponse_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_DeleteTableResponse_descriptor, new java.lang.String[] { });
            internal_static_TruncateTableRequest_descriptor = getDescriptor().getMessageTypes().get(20);
            internal_static_TruncateTableRequest_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_TruncateTableRequest_descriptor, new java.lang.String[] { "TableName", "PreserveSplits", });
            internal_static_TruncateTableResponse_descriptor = getDescriptor().getMessageTypes().get(21);
            internal_static_TruncateTableResponse_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_TruncateTableResponse_descriptor, new java.lang.String[] { });
            internal_static_EnableTableRequest_descriptor = getDescriptor().getMessageTypes().get(22);
            internal_static_EnableTableRequest_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_EnableTableRequest_descriptor, new java.lang.String[] { "TableName", });
            internal_static_EnableTableResponse_descriptor = getDescriptor().getMessageTypes().get(23);
            internal_static_EnableTableResponse_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_EnableTableResponse_descriptor, new java.lang.String[] { });
            internal_static_DisableTableRequest_descriptor = getDescriptor().getMessageTypes().get(24);
            internal_static_DisableTableRequest_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_DisableTableRequest_descriptor, new java.lang.String[] { "TableName", });
            internal_static_DisableTableResponse_descriptor = getDescriptor().getMessageTypes().get(25);
            internal_static_DisableTableResponse_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_DisableTableResponse_descriptor, new java.lang.String[] { });
            internal_static_ModifyTableRequest_descriptor = getDescriptor().getMessageTypes().get(26);
            internal_static_ModifyTableRequest_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_ModifyTableRequest_descriptor, new java.lang.String[] { "TableName", "TableSchema", });
            internal_static_ModifyTableResponse_descriptor = getDescriptor().getMessageTypes().get(27);
            internal_static_ModifyTableResponse_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_ModifyTableResponse_descriptor, new java.lang.String[] { });
            internal_static_CreateNamespaceRequest_descriptor = getDescriptor().getMessageTypes().get(28);
            internal_static_CreateNamespaceRequest_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_CreateNamespaceRequest_descriptor, new java.lang.String[] { "NamespaceDescriptor", });
            internal_static_CreateNamespaceResponse_descriptor = getDescriptor().getMessageTypes().get(29);
            internal_static_CreateNamespaceResponse_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_CreateNamespaceResponse_descriptor, new java.lang.String[] { });
            internal_static_DeleteNamespaceRequest_descriptor = getDescriptor().getMessageTypes().get(30);
            internal_static_DeleteNamespaceRequest_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_DeleteNamespaceRequest_descriptor, new java.lang.String[] { "NamespaceName", });
            internal_static_DeleteNamespaceResponse_descriptor = getDescriptor().getMessageTypes().get(31);
            internal_static_DeleteNamespaceResponse_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_DeleteNamespaceResponse_descriptor, new java.lang.String[] { });
            internal_static_ModifyNamespaceRequest_descriptor = getDescriptor().getMessageTypes().get(32);
            internal_static_ModifyNamespaceRequest_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_ModifyNamespaceRequest_descriptor, new java.lang.String[] { "NamespaceDescriptor", });
            internal_static_ModifyNamespaceResponse_descriptor = getDescriptor().getMessageTypes().get(33);
            internal_static_ModifyNamespaceResponse_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_ModifyNamespaceResponse_descriptor, new java.lang.String[] { });
            internal_static_GetNamespaceDescriptorRequest_descriptor = getDescriptor().getMessageTypes().get(34);
            internal_static_GetNamespaceDescriptorRequest_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_GetNamespaceDescriptorRequest_descriptor, new java.lang.String[] { "NamespaceName", });
            internal_static_GetNamespaceDescriptorResponse_descriptor = getDescriptor().getMessageTypes().get(35);
            internal_static_GetNamespaceDescriptorResponse_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_GetNamespaceDescriptorResponse_descriptor, new java.lang.String[] { "NamespaceDescriptor", });
            internal_static_ListNamespaceDescriptorsRequest_descriptor = getDescriptor().getMessageTypes().get(36);
            internal_static_ListNamespaceDescriptorsRequest_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_ListNamespaceDescriptorsRequest_descriptor, new java.lang.String[] { });
            internal_static_ListNamespaceDescriptorsResponse_descriptor = getDescriptor().getMessageTypes().get(37);
            internal_static_ListNamespaceDescriptorsResponse_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_ListNamespaceDescriptorsResponse_descriptor, new java.lang.String[] { "NamespaceDescriptor", });
            internal_static_ListTableDescriptorsByNamespaceRequest_descriptor = getDescriptor().getMessageTypes().get(38);
            internal_static_ListTableDescriptorsByNamespaceRequest_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_ListTableDescriptorsByNamespaceRequest_descriptor, new java.lang.String[] { "NamespaceName", });
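            // The remaining message types (indices 39-81) are wired the same way:
            // each descriptor is looked up by its zero-based declaration order in
            // Master.proto, then paired with a FieldAccessorTable naming its fields.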
            internal_static_ListTableDescriptorsByNamespaceResponse_descriptor = getDescriptor().getMessageTypes().get(39);
            internal_static_ListTableDescriptorsByNamespaceResponse_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_ListTableDescriptorsByNamespaceResponse_descriptor, new java.lang.String[] { "TableSchema", });
            internal_static_ListTableNamesByNamespaceRequest_descriptor = getDescriptor().getMessageTypes().get(40);
            internal_static_ListTableNamesByNamespaceRequest_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_ListTableNamesByNamespaceRequest_descriptor, new java.lang.String[] { "NamespaceName", });
            internal_static_ListTableNamesByNamespaceResponse_descriptor = getDescriptor().getMessageTypes().get(41);
            internal_static_ListTableNamesByNamespaceResponse_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_ListTableNamesByNamespaceResponse_descriptor, new java.lang.String[] { "TableName", });
            internal_static_ShutdownRequest_descriptor = getDescriptor().getMessageTypes().get(42);
            internal_static_ShutdownRequest_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_ShutdownRequest_descriptor, new java.lang.String[] { });
            internal_static_ShutdownResponse_descriptor = getDescriptor().getMessageTypes().get(43);
            internal_static_ShutdownResponse_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_ShutdownResponse_descriptor, new java.lang.String[] { });
            internal_static_StopMasterRequest_descriptor = getDescriptor().getMessageTypes().get(44);
            internal_static_StopMasterRequest_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_StopMasterRequest_descriptor, new java.lang.String[] { });
            internal_static_StopMasterResponse_descriptor = getDescriptor().getMessageTypes().get(45);
            internal_static_StopMasterResponse_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_StopMasterResponse_descriptor, new java.lang.String[] { });
            internal_static_BalanceRequest_descriptor = getDescriptor().getMessageTypes().get(46);
            internal_static_BalanceRequest_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_BalanceRequest_descriptor, new java.lang.String[] { });
            internal_static_BalanceResponse_descriptor = getDescriptor().getMessageTypes().get(47);
            internal_static_BalanceResponse_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_BalanceResponse_descriptor, new java.lang.String[] { "BalancerRan", });
            internal_static_SetBalancerRunningRequest_descriptor = getDescriptor().getMessageTypes().get(48);
            internal_static_SetBalancerRunningRequest_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_SetBalancerRunningRequest_descriptor, new java.lang.String[] { "On", "Synchronous", });
            internal_static_SetBalancerRunningResponse_descriptor = getDescriptor().getMessageTypes().get(49);
            internal_static_SetBalancerRunningResponse_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_SetBalancerRunningResponse_descriptor, new java.lang.String[] { "PrevBalanceValue", });
            internal_static_RunCatalogScanRequest_descriptor = getDescriptor().getMessageTypes().get(50);
            internal_static_RunCatalogScanRequest_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_RunCatalogScanRequest_descriptor, new java.lang.String[] { });
            internal_static_RunCatalogScanResponse_descriptor = getDescriptor().getMessageTypes().get(51);
            internal_static_RunCatalogScanResponse_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_RunCatalogScanResponse_descriptor, new java.lang.String[] { "ScanResult", });
            internal_static_EnableCatalogJanitorRequest_descriptor = getDescriptor().getMessageTypes().get(52);
            internal_static_EnableCatalogJanitorRequest_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_EnableCatalogJanitorRequest_descriptor, new java.lang.String[] { "Enable", });
            internal_static_EnableCatalogJanitorResponse_descriptor = getDescriptor().getMessageTypes().get(53);
            internal_static_EnableCatalogJanitorResponse_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_EnableCatalogJanitorResponse_descriptor, new java.lang.String[] { "PrevValue", });
            internal_static_IsCatalogJanitorEnabledRequest_descriptor = getDescriptor().getMessageTypes().get(54);
            internal_static_IsCatalogJanitorEnabledRequest_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_IsCatalogJanitorEnabledRequest_descriptor, new java.lang.String[] { });
            internal_static_IsCatalogJanitorEnabledResponse_descriptor = getDescriptor().getMessageTypes().get(55);
            internal_static_IsCatalogJanitorEnabledResponse_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_IsCatalogJanitorEnabledResponse_descriptor, new java.lang.String[] { "Value", });
            internal_static_SnapshotRequest_descriptor = getDescriptor().getMessageTypes().get(56);
            internal_static_SnapshotRequest_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_SnapshotRequest_descriptor, new java.lang.String[] { "Snapshot", });
            internal_static_SnapshotResponse_descriptor = getDescriptor().getMessageTypes().get(57);
            internal_static_SnapshotResponse_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_SnapshotResponse_descriptor, new java.lang.String[] { "ExpectedTimeout", });
            internal_static_GetCompletedSnapshotsRequest_descriptor = getDescriptor().getMessageTypes().get(58);
            internal_static_GetCompletedSnapshotsRequest_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_GetCompletedSnapshotsRequest_descriptor, new java.lang.String[] { });
            internal_static_GetCompletedSnapshotsResponse_descriptor = getDescriptor().getMessageTypes().get(59);
            internal_static_GetCompletedSnapshotsResponse_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_GetCompletedSnapshotsResponse_descriptor, new java.lang.String[] { "Snapshots", });
            internal_static_DeleteSnapshotRequest_descriptor = getDescriptor().getMessageTypes().get(60);
            internal_static_DeleteSnapshotRequest_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_DeleteSnapshotRequest_descriptor, new java.lang.String[] { "Snapshot", });
            internal_static_DeleteSnapshotResponse_descriptor = getDescriptor().getMessageTypes().get(61);
            internal_static_DeleteSnapshotResponse_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_DeleteSnapshotResponse_descriptor, new java.lang.String[] { });
            internal_static_RestoreSnapshotRequest_descriptor = getDescriptor().getMessageTypes().get(62);
            internal_static_RestoreSnapshotRequest_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_RestoreSnapshotRequest_descriptor, new java.lang.String[] { "Snapshot", });
            internal_static_RestoreSnapshotResponse_descriptor = getDescriptor().getMessageTypes().get(63);
            internal_static_RestoreSnapshotResponse_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_RestoreSnapshotResponse_descriptor, new java.lang.String[] { });
            internal_static_IsSnapshotDoneRequest_descriptor = getDescriptor().getMessageTypes().get(64);
            internal_static_IsSnapshotDoneRequest_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_IsSnapshotDoneRequest_descriptor, new java.lang.String[] { "Snapshot", });
            internal_static_IsSnapshotDoneResponse_descriptor = getDescriptor().getMessageTypes().get(65);
            internal_static_IsSnapshotDoneResponse_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_IsSnapshotDoneResponse_descriptor, new java.lang.String[] { "Done", "Snapshot", });
            internal_static_IsRestoreSnapshotDoneRequest_descriptor = getDescriptor().getMessageTypes().get(66);
            internal_static_IsRestoreSnapshotDoneRequest_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_IsRestoreSnapshotDoneRequest_descriptor, new java.lang.String[] { "Snapshot", });
            internal_static_IsRestoreSnapshotDoneResponse_descriptor = getDescriptor().getMessageTypes().get(67);
            internal_static_IsRestoreSnapshotDoneResponse_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_IsRestoreSnapshotDoneResponse_descriptor, new java.lang.String[] { "Done", });
            internal_static_GetSchemaAlterStatusRequest_descriptor = getDescriptor().getMessageTypes().get(68);
            internal_static_GetSchemaAlterStatusRequest_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_GetSchemaAlterStatusRequest_descriptor, new java.lang.String[] { "TableName", });
            internal_static_GetSchemaAlterStatusResponse_descriptor = getDescriptor().getMessageTypes().get(69);
            internal_static_GetSchemaAlterStatusResponse_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_GetSchemaAlterStatusResponse_descriptor, new java.lang.String[] { "YetToUpdateRegions", "TotalRegions", });
            internal_static_GetTableDescriptorsRequest_descriptor = getDescriptor().getMessageTypes().get(70);
            internal_static_GetTableDescriptorsRequest_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_GetTableDescriptorsRequest_descriptor,
                    new java.lang.String[] { "TableNames", "Regex", "IncludeSysTables", "Namespace", });
            internal_static_GetTableDescriptorsResponse_descriptor = getDescriptor().getMessageTypes().get(71);
            internal_static_GetTableDescriptorsResponse_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_GetTableDescriptorsResponse_descriptor, new java.lang.String[] { "TableSchema", });
            internal_static_GetTableNamesRequest_descriptor = getDescriptor().getMessageTypes().get(72);
            internal_static_GetTableNamesRequest_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_GetTableNamesRequest_descriptor,
                    new java.lang.String[] { "Regex", "IncludeSysTables", "Namespace", });
            internal_static_GetTableNamesResponse_descriptor = getDescriptor().getMessageTypes().get(73);
            internal_static_GetTableNamesResponse_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_GetTableNamesResponse_descriptor, new java.lang.String[] { "TableNames", });
            internal_static_GetClusterStatusRequest_descriptor = getDescriptor().getMessageTypes().get(74);
            internal_static_GetClusterStatusRequest_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_GetClusterStatusRequest_descriptor, new java.lang.String[] { });
            internal_static_GetClusterStatusResponse_descriptor = getDescriptor().getMessageTypes().get(75);
            internal_static_GetClusterStatusResponse_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_GetClusterStatusResponse_descriptor, new java.lang.String[] { "ClusterStatus", });
            internal_static_IsMasterRunningRequest_descriptor = getDescriptor().getMessageTypes().get(76);
            internal_static_IsMasterRunningRequest_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_IsMasterRunningRequest_descriptor, new java.lang.String[] { });
            internal_static_IsMasterRunningResponse_descriptor = getDescriptor().getMessageTypes().get(77);
            internal_static_IsMasterRunningResponse_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_IsMasterRunningResponse_descriptor, new java.lang.String[] { "IsMasterRunning", });
            internal_static_ExecProcedureRequest_descriptor = getDescriptor().getMessageTypes().get(78);
            internal_static_ExecProcedureRequest_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_ExecProcedureRequest_descriptor, new java.lang.String[] { "Procedure", });
            internal_static_ExecProcedureResponse_descriptor = getDescriptor().getMessageTypes().get(79);
            internal_static_ExecProcedureResponse_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_ExecProcedureResponse_descriptor, new java.lang.String[] { "ExpectedTimeout", "ReturnData", });
            internal_static_IsProcedureDoneRequest_descriptor = getDescriptor().getMessageTypes().get(80);
            internal_static_IsProcedureDoneRequest_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_IsProcedureDoneRequest_descriptor, new java.lang.String[] { "Procedure", });
            internal_static_IsProcedureDoneResponse_descriptor = getDescriptor().getMessageTypes().get(81);
            internal_static_IsProcedureDoneResponse_fieldAccessorTable =
                new com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                    internal_static_IsProcedureDoneResponse_descriptor, new java.lang.String[] { "Done", "Snapshot", });
            return null;
          }
        };
    com.google.protobuf.Descriptors.FileDescriptor
        .internalBuildGeneratedFileFrom(descriptorData,
            new com.google.protobuf.Descriptors.FileDescriptor[] {
                org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(),
                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(),
                org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.getDescriptor(),
            }, assigner);
  }

  // @@protoc_insertion_point(outer_class_scope)
}