// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: MasterAdmin.proto

package org.apache.hadoop.hbase.protobuf.generated;

public final class MasterAdminProtos {
  private MasterAdminProtos() {}
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }

  public interface AddColumnRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required bytes tableName = 1;
    boolean hasTableName();
    com.google.protobuf.ByteString getTableName();

    // required .ColumnFamilySchema columnFamilies = 2;
    boolean hasColumnFamilies();
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies();
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder();
  }

  public static final class AddColumnRequest extends
      com.google.protobuf.GeneratedMessage
      implements AddColumnRequestOrBuilder {
    // Use AddColumnRequest.newBuilder() to construct.
    private AddColumnRequest(Builder builder) {
      super(builder);
    }
    private AddColumnRequest(boolean noInit) {}

    private static final AddColumnRequest defaultInstance;
    public static AddColumnRequest getDefaultInstance() {
      return defaultInstance;
    }

    public AddColumnRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AddColumnRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AddColumnRequest_fieldAccessorTable;
    }

    private int bitField0_;
    // required bytes tableName = 1;
    public static final int TABLENAME_FIELD_NUMBER = 1;
    private com.google.protobuf.ByteString tableName_;
    public boolean hasTableName() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public com.google.protobuf.ByteString getTableName() {
      return tableName_;
    }

    // required .ColumnFamilySchema columnFamilies = 2;
    public static final int COLUMNFAMILIES_FIELD_NUMBER = 2;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema columnFamilies_;
    public boolean hasColumnFamilies() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies() {
      return columnFamilies_;
    }
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder() {
      return columnFamilies_;
    }

    private void initFields() {
      tableName_ = com.google.protobuf.ByteString.EMPTY;
      columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance();
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasTableName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasColumnFamilies()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getColumnFamilies().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, tableName_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeMessage(2, columnFamilies_);
      }
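      // Fields that were not recognized when this message was parsed are kept in
      // the UnknownFieldSet and re-serialized here, so unknown data survives a
      // parse/write round trip.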
getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, tableName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, columnFamilies_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest) obj; boolean result = true; result = result && (hasTableName() == other.hasTableName()); if (hasTableName()) { result = result && getTableName() .equals(other.getTableName()); } result = result && (hasColumnFamilies() == other.hasColumnFamilies()); if (hasColumnFamilies()) { result = result && getColumnFamilies() .equals(other.getColumnFamilies()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasTableName()) { hash = (37 * hash) + TABLENAME_FIELD_NUMBER; hash = (53 * hash) + getTableName().hashCode(); } if (hasColumnFamilies()) { hash = (37 * hash) + COLUMNFAMILIES_FIELD_NUMBER; hash = (53 * hash) + getColumnFamilies().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AddColumnRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AddColumnRequest_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getColumnFamiliesFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); tableName_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); if (columnFamiliesBuilder_ == null) { columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); } else { columnFamiliesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest.getDescriptor(); } 
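      // Illustrative usage sketch (not part of the protoc output): client code
      // typically populates this builder and calls build(). The table name and
      // ColumnFamilySchema below are placeholders.
      //
      //   HBaseProtos.ColumnFamilySchema cf = ...;  // schema built elsewhere
      //   AddColumnRequest req = AddColumnRequest.newBuilder()
      //       .setTableName(com.google.protobuf.ByteString.copyFromUtf8("myTable"))
      //       .setColumnFamilies(cf)
      //       .build();
      //
      // build() enforces the two required fields via isInitialized() and throws
      // if either is missing; buildPartial() skips that check.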
public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.tableName_ = tableName_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } if (columnFamiliesBuilder_ == null) { result.columnFamilies_ = columnFamilies_; } else { result.columnFamilies_ = columnFamiliesBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest.getDefaultInstance()) return this; if (other.hasTableName()) { setTableName(other.getTableName()); } if (other.hasColumnFamilies()) { mergeColumnFamilies(other.getColumnFamilies()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasTableName()) { return false; } if (!hasColumnFamilies()) { return false; } if (!getColumnFamilies().isInitialized()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } case 10: { bitField0_ |= 0x00000001; tableName_ = input.readBytes(); break; } case 18: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.newBuilder(); if (hasColumnFamilies()) { 
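      // Standard protobuf merge semantics for a repeated occurrence of a singular
      // message field: the previously set columnFamilies is merged into the
      // sub-builder first, then the newly read occurrence is merged on top of it.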
subBuilder.mergeFrom(getColumnFamilies()); } input.readMessage(subBuilder, extensionRegistry); setColumnFamilies(subBuilder.buildPartial()); break; } } } } private int bitField0_; // required bytes tableName = 1; private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } public com.google.protobuf.ByteString getTableName() { return tableName_; } public Builder setTableName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; tableName_ = value; onChanged(); return this; } public Builder clearTableName() { bitField0_ = (bitField0_ & ~0x00000001); tableName_ = getDefaultInstance().getTableName(); onChanged(); return this; } // required .ColumnFamilySchema columnFamilies = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> columnFamiliesBuilder_; public boolean hasColumnFamilies() { return ((bitField0_ & 0x00000002) == 0x00000002); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies() { if (columnFamiliesBuilder_ == null) { return columnFamilies_; } else { return columnFamiliesBuilder_.getMessage(); } } public Builder setColumnFamilies(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { if (columnFamiliesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } columnFamilies_ = value; onChanged(); } else { columnFamiliesBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } public Builder setColumnFamilies( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder builderForValue) { if (columnFamiliesBuilder_ == null) { columnFamilies_ = builderForValue.build(); onChanged(); } else { columnFamiliesBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } public Builder mergeColumnFamilies(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { if (columnFamiliesBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && columnFamilies_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance()) { columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.newBuilder(columnFamilies_).mergeFrom(value).buildPartial(); } else { columnFamilies_ = value; } onChanged(); } else { columnFamiliesBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } public Builder clearColumnFamilies() { if (columnFamiliesBuilder_ == null) { columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); onChanged(); } else { columnFamiliesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder getColumnFamiliesBuilder() { bitField0_ |= 0x00000002; onChanged(); return getColumnFamiliesFieldBuilder().getBuilder(); } public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder() { if (columnFamiliesBuilder_ != null) { return columnFamiliesBuilder_.getMessageOrBuilder(); } else { return columnFamilies_; } } private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> getColumnFamiliesFieldBuilder() { if (columnFamiliesBuilder_ == null) { columnFamiliesBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder>( columnFamilies_, getParentForChildren(), isClean()); columnFamilies_ = null; } return columnFamiliesBuilder_; } // @@protoc_insertion_point(builder_scope:AddColumnRequest) } static { defaultInstance = new AddColumnRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:AddColumnRequest) } public interface AddColumnResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } public static final class AddColumnResponse extends com.google.protobuf.GeneratedMessage implements AddColumnResponseOrBuilder { // Use AddColumnResponse.newBuilder() to construct. private AddColumnResponse(Builder builder) { super(builder); } private AddColumnResponse(boolean noInit) {} private static final AddColumnResponse defaultInstance; public static AddColumnResponse getDefaultInstance() { return defaultInstance; } public AddColumnResponse getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AddColumnResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AddColumnResponse_fieldAccessorTable; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse) obj; boolean result = true; result = result && 
getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AddColumnResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AddColumnResponse_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.getDescriptor(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { 
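      // Tag-dispatch parse loop: readTag() returns 0 at end of input; any tag this
      // message does not recognize is routed through parseUnknownField() into the
      // UnknownFieldSet. AddColumnResponse declares no fields, so every non-zero
      // tag ends up there.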
com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } } } } // @@protoc_insertion_point(builder_scope:AddColumnResponse) } static { defaultInstance = new AddColumnResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:AddColumnResponse) } public interface DeleteColumnRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required bytes tableName = 1; boolean hasTableName(); com.google.protobuf.ByteString getTableName(); // required bytes columnName = 2; boolean hasColumnName(); com.google.protobuf.ByteString getColumnName(); } public static final class DeleteColumnRequest extends com.google.protobuf.GeneratedMessage implements DeleteColumnRequestOrBuilder { // Use DeleteColumnRequest.newBuilder() to construct. private DeleteColumnRequest(Builder builder) { super(builder); } private DeleteColumnRequest(boolean noInit) {} private static final DeleteColumnRequest defaultInstance; public static DeleteColumnRequest getDefaultInstance() { return defaultInstance; } public DeleteColumnRequest getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteColumnRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteColumnRequest_fieldAccessorTable; } private int bitField0_; // required bytes tableName = 1; public static final int TABLENAME_FIELD_NUMBER = 1; private com.google.protobuf.ByteString tableName_; public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } public com.google.protobuf.ByteString getTableName() { return tableName_; } // required bytes columnName = 2; public static final int COLUMNNAME_FIELD_NUMBER = 2; private com.google.protobuf.ByteString columnName_; public boolean hasColumnName() { return ((bitField0_ & 0x00000002) == 0x00000002); } public com.google.protobuf.ByteString getColumnName() { return columnName_; } private void initFields() { tableName_ = com.google.protobuf.ByteString.EMPTY; columnName_ = com.google.protobuf.ByteString.EMPTY; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasTableName()) { memoizedIsInitialized = 0; return false; } if (!hasColumnName()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, tableName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, columnName_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += 
com.google.protobuf.CodedOutputStream .computeBytesSize(1, tableName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, columnName_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest) obj; boolean result = true; result = result && (hasTableName() == other.hasTableName()); if (hasTableName()) { result = result && getTableName() .equals(other.getTableName()); } result = result && (hasColumnName() == other.hasColumnName()); if (hasColumnName()) { result = result && getColumnName() .equals(other.getColumnName()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasTableName()) { hash = (37 * hash) + TABLENAME_FIELD_NUMBER; hash = (53 * hash) + getTableName().hashCode(); } if (hasColumnName()) { hash = (37 * hash) + COLUMNNAME_FIELD_NUMBER; hash = (53 * hash) + getColumnName().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { 
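      // Delimited form: the stream carries a varint length prefix followed by that
      // many message bytes. mergeDelimitedFrom() returns false when the stream is
      // already at EOF, in which case null is returned instead of an empty message.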
Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteColumnRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteColumnRequest_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); tableName_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); columnName_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest.getDescriptor(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest result 
= buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.tableName_ = tableName_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.columnName_ = columnName_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest.getDefaultInstance()) return this; if (other.hasTableName()) { setTableName(other.getTableName()); } if (other.hasColumnName()) { setColumnName(other.getColumnName()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasTableName()) { return false; } if (!hasColumnName()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } case 10: { bitField0_ |= 0x00000001; tableName_ = input.readBytes(); break; } case 18: { bitField0_ |= 0x00000002; columnName_ = input.readBytes(); break; } } } } private int bitField0_; // required bytes tableName = 1; private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } public com.google.protobuf.ByteString getTableName() { return tableName_; } public Builder setTableName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; tableName_ = value; onChanged(); return this; } public Builder clearTableName() { bitField0_ = (bitField0_ & ~0x00000001); tableName_ = getDefaultInstance().getTableName(); onChanged(); return this; } // required bytes columnName = 2; private 
com.google.protobuf.ByteString columnName_ = com.google.protobuf.ByteString.EMPTY; public boolean hasColumnName() { return ((bitField0_ & 0x00000002) == 0x00000002); } public com.google.protobuf.ByteString getColumnName() { return columnName_; } public Builder setColumnName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; columnName_ = value; onChanged(); return this; } public Builder clearColumnName() { bitField0_ = (bitField0_ & ~0x00000002); columnName_ = getDefaultInstance().getColumnName(); onChanged(); return this; } // @@protoc_insertion_point(builder_scope:DeleteColumnRequest) } static { defaultInstance = new DeleteColumnRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:DeleteColumnRequest) } public interface DeleteColumnResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } public static final class DeleteColumnResponse extends com.google.protobuf.GeneratedMessage implements DeleteColumnResponseOrBuilder { // Use DeleteColumnResponse.newBuilder() to construct. private DeleteColumnResponse(Builder builder) { super(builder); } private DeleteColumnResponse(boolean noInit) {} private static final DeleteColumnResponse defaultInstance; public static DeleteColumnResponse getDefaultInstance() { return defaultInstance; } public DeleteColumnResponse getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteColumnResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteColumnResponse_fieldAccessorTable; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parseFrom( com.google.protobuf.ByteString 
data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { 
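      // The descriptor is looked up from the file-level internal_static_* tables,
      // which protoc populates when this outer class's descriptor data is
      // initialized (in the static initializer later in this file).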
return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteColumnResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteColumnResponse_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.getDescriptor(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, 
tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } } } } // @@protoc_insertion_point(builder_scope:DeleteColumnResponse) } static { defaultInstance = new DeleteColumnResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:DeleteColumnResponse) } public interface ModifyColumnRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required bytes tableName = 1; boolean hasTableName(); com.google.protobuf.ByteString getTableName(); // required .ColumnFamilySchema columnFamilies = 2; boolean hasColumnFamilies(); org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies(); org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder(); } public static final class ModifyColumnRequest extends com.google.protobuf.GeneratedMessage implements ModifyColumnRequestOrBuilder { // Use ModifyColumnRequest.newBuilder() to construct. private ModifyColumnRequest(Builder builder) { super(builder); } private ModifyColumnRequest(boolean noInit) {} private static final ModifyColumnRequest defaultInstance; public static ModifyColumnRequest getDefaultInstance() { return defaultInstance; } public ModifyColumnRequest getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyColumnRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyColumnRequest_fieldAccessorTable; } private int bitField0_; // required bytes tableName = 1; public static final int TABLENAME_FIELD_NUMBER = 1; private com.google.protobuf.ByteString tableName_; public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } public com.google.protobuf.ByteString getTableName() { return tableName_; } // required .ColumnFamilySchema columnFamilies = 2; public static final int COLUMNFAMILIES_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema columnFamilies_; public boolean hasColumnFamilies() { return ((bitField0_ & 0x00000002) == 0x00000002); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies() { return columnFamilies_; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder() { return columnFamilies_; } private void initFields() { tableName_ = com.google.protobuf.ByteString.EMPTY; columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasTableName()) { memoizedIsInitialized = 0; return false; } if (!hasColumnFamilies()) { memoizedIsInitialized = 0; return false; } if (!getColumnFamilies().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, tableName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { 
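      // Field 2 is the nested ColumnFamilySchema message; writeMessage() emits the
      // field tag, a varint length, and then the nested message bytes (wire type 2).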
output.writeMessage(2, columnFamilies_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, tableName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, columnFamilies_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest) obj; boolean result = true; result = result && (hasTableName() == other.hasTableName()); if (hasTableName()) { result = result && getTableName() .equals(other.getTableName()); } result = result && (hasColumnFamilies() == other.hasColumnFamilies()); if (hasColumnFamilies()) { result = result && getColumnFamilies() .equals(other.getColumnFamilies()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasTableName()) { hash = (37 * hash) + TABLENAME_FIELD_NUMBER; hash = (53 * hash) + getTableName().hashCode(); } if (hasColumnFamilies()) { hash = (37 * hash) + COLUMNFAMILIES_FIELD_NUMBER; hash = (53 * hash) + getColumnFamilies().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parseFrom( java.io.InputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyColumnRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyColumnRequest_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getColumnFamiliesFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); tableName_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); if (columnFamiliesBuilder_ == null) { columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); } else { columnFamiliesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest.getDescriptor(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.tableName_ = tableName_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } if (columnFamiliesBuilder_ == null) { result.columnFamilies_ = columnFamilies_; } else { result.columnFamilies_ = columnFamiliesBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest.getDefaultInstance()) return this; if (other.hasTableName()) { setTableName(other.getTableName()); } if (other.hasColumnFamilies()) { mergeColumnFamilies(other.getColumnFamilies()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasTableName()) { return false; } if (!hasColumnFamilies()) { return false; } if (!getColumnFamilies().isInitialized()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } case 10: { bitField0_ |= 0x00000001; tableName_ = input.readBytes(); break; } case 18: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder subBuilder = 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.newBuilder(); if (hasColumnFamilies()) { subBuilder.mergeFrom(getColumnFamilies()); } input.readMessage(subBuilder, extensionRegistry); setColumnFamilies(subBuilder.buildPartial()); break; } } } } private int bitField0_; // required bytes tableName = 1; private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } public com.google.protobuf.ByteString getTableName() { return tableName_; } public Builder setTableName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; tableName_ = value; onChanged(); return this; } public Builder clearTableName() { bitField0_ = (bitField0_ & ~0x00000001); tableName_ = getDefaultInstance().getTableName(); onChanged(); return this; } // required .ColumnFamilySchema columnFamilies = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> columnFamiliesBuilder_; public boolean hasColumnFamilies() { return ((bitField0_ & 0x00000002) == 0x00000002); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies() { if (columnFamiliesBuilder_ == null) { return columnFamilies_; } else { return columnFamiliesBuilder_.getMessage(); } } public Builder setColumnFamilies(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { if (columnFamiliesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } columnFamilies_ = value; onChanged(); } else { columnFamiliesBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } public Builder setColumnFamilies( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder builderForValue) { if (columnFamiliesBuilder_ == null) { columnFamilies_ = builderForValue.build(); onChanged(); } else { columnFamiliesBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } public Builder mergeColumnFamilies(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { if (columnFamiliesBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && columnFamilies_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance()) { columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.newBuilder(columnFamilies_).mergeFrom(value).buildPartial(); } else { columnFamilies_ = value; } onChanged(); } else { columnFamiliesBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } public Builder clearColumnFamilies() { if (columnFamiliesBuilder_ == null) { columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); onChanged(); } else { columnFamiliesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder getColumnFamiliesBuilder() { bitField0_ |= 0x00000002; 
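// Illustrative usage sketch (added for documentation, not part of the protoc output): a
// ModifyColumnRequest is normally assembled through this Builder. The table name below is a
// made-up placeholder, and "cfSchema" stands for a ColumnFamilySchema built elsewhere via
// HBaseProtos.ColumnFamilySchema.newBuilder(); both fields are required, so build() throws an
// UninitializedMessageException if either is missing.
//
//   HBaseProtos.ColumnFamilySchema cfSchema = ...;  // built separately
//   ModifyColumnRequest request = ModifyColumnRequest.newBuilder()
//       .setTableName(com.google.protobuf.ByteString.copyFromUtf8("exampleTable"))
//       .setColumnFamilies(cfSchema)
//       .build();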
onChanged(); return getColumnFamiliesFieldBuilder().getBuilder(); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder() { if (columnFamiliesBuilder_ != null) { return columnFamiliesBuilder_.getMessageOrBuilder(); } else { return columnFamilies_; } } private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> getColumnFamiliesFieldBuilder() { if (columnFamiliesBuilder_ == null) { columnFamiliesBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder>( columnFamilies_, getParentForChildren(), isClean()); columnFamilies_ = null; } return columnFamiliesBuilder_; } // @@protoc_insertion_point(builder_scope:ModifyColumnRequest) } static { defaultInstance = new ModifyColumnRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:ModifyColumnRequest) } public interface ModifyColumnResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } public static final class ModifyColumnResponse extends com.google.protobuf.GeneratedMessage implements ModifyColumnResponseOrBuilder { // Use ModifyColumnResponse.newBuilder() to construct. private ModifyColumnResponse(Builder builder) { super(builder); } private ModifyColumnResponse(boolean noInit) {} private static final ModifyColumnResponse defaultInstance; public static ModifyColumnResponse getDefaultInstance() { return defaultInstance; } public ModifyColumnResponse getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyColumnResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyColumnResponse_fieldAccessorTable; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse other = 
(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse prototype) { return 
newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyColumnResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyColumnResponse_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.getDescriptor(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final 
boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } } } } // @@protoc_insertion_point(builder_scope:ModifyColumnResponse) } static { defaultInstance = new ModifyColumnResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:ModifyColumnResponse) } public interface MoveRegionRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .RegionSpecifier region = 1; boolean hasRegion(); org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); // optional .ServerName destServerName = 2; boolean hasDestServerName(); org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDestServerName(); org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDestServerNameOrBuilder(); } public static final class MoveRegionRequest extends com.google.protobuf.GeneratedMessage implements MoveRegionRequestOrBuilder { // Use MoveRegionRequest.newBuilder() to construct. private MoveRegionRequest(Builder builder) { super(builder); } private MoveRegionRequest(boolean noInit) {} private static final MoveRegionRequest defaultInstance; public static MoveRegionRequest getDefaultInstance() { return defaultInstance; } public MoveRegionRequest getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_MoveRegionRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_MoveRegionRequest_fieldAccessorTable; } private int bitField0_; // required .RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { return region_; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { return region_; } // optional .ServerName destServerName = 2; public static final int DESTSERVERNAME_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName destServerName_; public boolean hasDestServerName() { return ((bitField0_ & 0x00000002) == 0x00000002); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDestServerName() { return destServerName_; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDestServerNameOrBuilder() { return destServerName_; } private void initFields() { region_ = 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); destServerName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasRegion()) { memoizedIsInitialized = 0; return false; } if (!getRegion().isInitialized()) { memoizedIsInitialized = 0; return false; } if (hasDestServerName()) { if (!getDestServerName().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, region_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeMessage(2, destServerName_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, region_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, destServerName_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest) obj; boolean result = true; result = result && (hasRegion() == other.hasRegion()); if (hasRegion()) { result = result && getRegion() .equals(other.getRegion()); } result = result && (hasDestServerName() == other.hasDestServerName()); if (hasDestServerName()) { result = result && getDestServerName() .equals(other.getDestServerName()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRegion()) { hash = (37 * hash) + REGION_FIELD_NUMBER; hash = (53 * hash) + getRegion().hashCode(); } if (hasDestServerName()) { hash = (37 * hash) + DESTSERVERNAME_FIELD_NUMBER; hash = (53 * hash) + getDestServerName().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public 
static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_MoveRegionRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_MoveRegionRequest_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest.newBuilder() private Builder() { 
maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getRegionFieldBuilder(); getDestServerNameFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (regionBuilder_ == null) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); } else { regionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (destServerNameBuilder_ == null) { destServerName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); } else { destServerNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest.getDescriptor(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (regionBuilder_ == null) { result.region_ = region_; } else { result.region_ = regionBuilder_.build(); } if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } if (destServerNameBuilder_ == null) { result.destServerName_ = destServerName_; } else { result.destServerName_ = destServerNameBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest.getDefaultInstance()) return this; if (other.hasRegion()) { mergeRegion(other.getRegion()); } if (other.hasDestServerName()) { 
mergeDestServerName(other.getDestServerName()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasRegion()) { return false; } if (!getRegion().isInitialized()) { return false; } if (hasDestServerName()) { if (!getDestServerName().isInitialized()) { return false; } } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } case 10: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); if (hasRegion()) { subBuilder.mergeFrom(getRegion()); } input.readMessage(subBuilder, extensionRegistry); setRegion(subBuilder.buildPartial()); break; } case 18: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); if (hasDestServerName()) { subBuilder.mergeFrom(getDestServerName()); } input.readMessage(subBuilder, extensionRegistry); setDestServerName(subBuilder.buildPartial()); break; } } } } private int bitField0_; // required .RegionSpecifier region = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { return region_; } else { return regionBuilder_.getMessage(); } } public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } region_ = value; onChanged(); } else { regionBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } public Builder setRegion( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { if (regionBuilder_ == null) { region_ = builderForValue.build(); onChanged(); } else { regionBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); } else { region_ = value; } onChanged(); } else { 
regionBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } public Builder clearRegion() { if (regionBuilder_ == null) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); onChanged(); } else { regionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { bitField0_ |= 0x00000001; onChanged(); return getRegionFieldBuilder().getBuilder(); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); } else { return region_; } } private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { if (regionBuilder_ == null) { regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( region_, getParentForChildren(), isClean()); region_ = null; } return regionBuilder_; } // optional .ServerName destServerName = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName destServerName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> destServerNameBuilder_; public boolean hasDestServerName() { return ((bitField0_ & 0x00000002) == 0x00000002); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDestServerName() { if (destServerNameBuilder_ == null) { return destServerName_; } else { return destServerNameBuilder_.getMessage(); } } public Builder setDestServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { if (destServerNameBuilder_ == null) { if (value == null) { throw new NullPointerException(); } destServerName_ = value; onChanged(); } else { destServerNameBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } public Builder setDestServerName( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { if (destServerNameBuilder_ == null) { destServerName_ = builderForValue.build(); onChanged(); } else { destServerNameBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } public Builder mergeDestServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { if (destServerNameBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && destServerName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { destServerName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(destServerName_).mergeFrom(value).buildPartial(); } else { destServerName_ = value; } onChanged(); } else { destServerNameBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } 
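// Illustrative usage sketch (added for documentation, not part of the protoc output): building a
// MoveRegionRequest with this Builder. "regionSpec" and "destination" are placeholders for a
// RegionSpecifier and ServerName constructed elsewhere from HBaseProtos; setDestServerName() may
// be omitted because destServerName is declared optional.
//
//   HBaseProtos.RegionSpecifier regionSpec = ...;  // built separately, required
//   HBaseProtos.ServerName destination = ...;      // built separately, optional
//   MoveRegionRequest request = MoveRegionRequest.newBuilder()
//       .setRegion(regionSpec)
//       .setDestServerName(destination)
//       .build();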
public Builder clearDestServerName() { if (destServerNameBuilder_ == null) { destServerName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); onChanged(); } else { destServerNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getDestServerNameBuilder() { bitField0_ |= 0x00000002; onChanged(); return getDestServerNameFieldBuilder().getBuilder(); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDestServerNameOrBuilder() { if (destServerNameBuilder_ != null) { return destServerNameBuilder_.getMessageOrBuilder(); } else { return destServerName_; } } private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getDestServerNameFieldBuilder() { if (destServerNameBuilder_ == null) { destServerNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( destServerName_, getParentForChildren(), isClean()); destServerName_ = null; } return destServerNameBuilder_; } // @@protoc_insertion_point(builder_scope:MoveRegionRequest) } static { defaultInstance = new MoveRegionRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:MoveRegionRequest) } public interface MoveRegionResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } public static final class MoveRegionResponse extends com.google.protobuf.GeneratedMessage implements MoveRegionResponseOrBuilder { // Use MoveRegionResponse.newBuilder() to construct. 
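// Note (added for documentation, not part of the protoc output): MoveRegionResponse declares no
// fields of its own and, judging by the request/response naming, serves as the acknowledgement
// for MoveRegionRequest. A minimal round trip, assuming a previously obtained "response"
// instance, might look like:
//
//   byte[] wire = response.toByteArray();
//   MoveRegionResponse copy = MoveRegionResponse.parseFrom(wire);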
private MoveRegionResponse(Builder builder) { super(builder); } private MoveRegionResponse(boolean noInit) {} private static final MoveRegionResponse defaultInstance; public static MoveRegionResponse getDefaultInstance() { return defaultInstance; } public MoveRegionResponse getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_MoveRegionResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_MoveRegionResponse_fieldAccessorTable; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse parseFrom(java.io.InputStream input) 
throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_MoveRegionResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_MoveRegionResponse_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.getDescriptor(); } public 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } } } } // @@protoc_insertion_point(builder_scope:MoveRegionResponse) } static { defaultInstance = new MoveRegionResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:MoveRegionResponse) } public interface AssignRegionRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .RegionSpecifier region = 1; boolean hasRegion(); org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); } public static final class AssignRegionRequest extends com.google.protobuf.GeneratedMessage implements AssignRegionRequestOrBuilder { // Use AssignRegionRequest.newBuilder() to construct. 
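// Illustrative usage sketch (added for documentation, not part of the protoc output):
// AssignRegionRequest carries a single required RegionSpecifier. "regionSpec" below is a
// placeholder for a value built from HBaseProtos.RegionSpecifier.newBuilder() elsewhere;
// parseFrom() reverses the serialization produced by toByteArray().
//
//   HBaseProtos.RegionSpecifier regionSpec = ...;  // built separately
//   AssignRegionRequest request = AssignRegionRequest.newBuilder()
//       .setRegion(regionSpec)
//       .build();
//   AssignRegionRequest parsed = AssignRegionRequest.parseFrom(request.toByteArray());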
private AssignRegionRequest(Builder builder) { super(builder); } private AssignRegionRequest(boolean noInit) {} private static final AssignRegionRequest defaultInstance; public static AssignRegionRequest getDefaultInstance() { return defaultInstance; } public AssignRegionRequest getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AssignRegionRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AssignRegionRequest_fieldAccessorTable; } private int bitField0_; // required .RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { return region_; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { return region_; } private void initFields() { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasRegion()) { memoizedIsInitialized = 0; return false; } if (!getRegion().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, region_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, region_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest) obj; boolean result = true; result = result && (hasRegion() == other.hasRegion()); if (hasRegion()) { result = result && getRegion() .equals(other.getRegion()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRegion()) { hash = (37 * hash) + REGION_FIELD_NUMBER; hash = (53 * hash) + getRegion().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AssignRegionRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AssignRegionRequest_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getRegionFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (regionBuilder_ == null) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); } else { regionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest.getDescriptor(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (regionBuilder_ == null) { result.region_ = region_; } else { result.region_ = regionBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest other) { if (other == 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest.getDefaultInstance()) return this; if (other.hasRegion()) { mergeRegion(other.getRegion()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasRegion()) { return false; } if (!getRegion().isInitialized()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } case 10: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); if (hasRegion()) { subBuilder.mergeFrom(getRegion()); } input.readMessage(subBuilder, extensionRegistry); setRegion(subBuilder.buildPartial()); break; } } } } private int bitField0_; // required .RegionSpecifier region = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { return region_; } else { return regionBuilder_.getMessage(); } } public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } region_ = value; onChanged(); } else { regionBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } public Builder setRegion( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { if (regionBuilder_ == null) { region_ = builderForValue.build(); onChanged(); } else { regionBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); } else { region_ = value; } onChanged(); } else { regionBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } public Builder clearRegion() { if (regionBuilder_ == null) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); onChanged(); } else { regionBuilder_.clear(); } bitField0_ = (bitField0_ & 
~0x00000001); return this; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { bitField0_ |= 0x00000001; onChanged(); return getRegionFieldBuilder().getBuilder(); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); } else { return region_; } } private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { if (regionBuilder_ == null) { regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( region_, getParentForChildren(), isClean()); region_ = null; } return regionBuilder_; } // @@protoc_insertion_point(builder_scope:AssignRegionRequest) } static { defaultInstance = new AssignRegionRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:AssignRegionRequest) } public interface AssignRegionResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } public static final class AssignRegionResponse extends com.google.protobuf.GeneratedMessage implements AssignRegionResponseOrBuilder { // Use AssignRegionResponse.newBuilder() to construct. private AssignRegionResponse(Builder builder) { super(builder); } private AssignRegionResponse(boolean noInit) {} private static final AssignRegionResponse defaultInstance; public static AssignRegionResponse getDefaultInstance() { return defaultInstance; } public AssignRegionResponse getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AssignRegionResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AssignRegionResponse_fieldAccessorTable; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse)) { return super.equals(obj); } 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AssignRegionResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_AssignRegionResponse_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.getDescriptor(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse other) { if (other == 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } } } } // @@protoc_insertion_point(builder_scope:AssignRegionResponse) } static { defaultInstance = new AssignRegionResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:AssignRegionResponse) } public interface UnassignRegionRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .RegionSpecifier region = 1; boolean hasRegion(); org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); // optional bool force = 2 [default = false]; boolean hasForce(); boolean getForce(); } public static final class UnassignRegionRequest extends com.google.protobuf.GeneratedMessage implements UnassignRegionRequestOrBuilder { // Use UnassignRegionRequest.newBuilder() to construct. private UnassignRegionRequest(Builder builder) { super(builder); } private UnassignRegionRequest(boolean noInit) {} private static final UnassignRegionRequest defaultInstance; public static UnassignRegionRequest getDefaultInstance() { return defaultInstance; } public UnassignRegionRequest getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_UnassignRegionRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_UnassignRegionRequest_fieldAccessorTable; } private int bitField0_; // required .RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { return region_; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { return region_; } // optional bool force = 2 [default = false]; public static final int FORCE_FIELD_NUMBER = 2; private boolean force_; public boolean hasForce() { return ((bitField0_ & 0x00000002) == 0x00000002); } public boolean getForce() { return force_; } private void initFields() { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); force_ = false; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if 
(!hasRegion()) { memoizedIsInitialized = 0; return false; } if (!getRegion().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, region_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBool(2, force_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, region_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(2, force_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest) obj; boolean result = true; result = result && (hasRegion() == other.hasRegion()); if (hasRegion()) { result = result && getRegion() .equals(other.getRegion()); } result = result && (hasForce() == other.hasForce()); if (hasForce()) { result = result && (getForce() == other.getForce()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRegion()) { hash = (37 * hash) + REGION_FIELD_NUMBER; hash = (53 * hash) + getRegion().hashCode(); } if (hasForce()) { hash = (37 * hash) + FORCE_FIELD_NUMBER; hash = (53 * hash) + hashBoolean(getForce()); } hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_UnassignRegionRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_UnassignRegionRequest_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getRegionFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (regionBuilder_ == null) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); } else { 
regionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); force_ = false; bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest.getDescriptor(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (regionBuilder_ == null) { result.region_ = region_; } else { result.region_ = regionBuilder_.build(); } if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.force_ = force_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest.getDefaultInstance()) return this; if (other.hasRegion()) { mergeRegion(other.getRegion()); } if (other.hasForce()) { setForce(other.getForce()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasRegion()) { return false; } if (!getRegion().isInitialized()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } case 
10: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); if (hasRegion()) { subBuilder.mergeFrom(getRegion()); } input.readMessage(subBuilder, extensionRegistry); setRegion(subBuilder.buildPartial()); break; } case 16: { bitField0_ |= 0x00000002; force_ = input.readBool(); break; } } } } private int bitField0_; // required .RegionSpecifier region = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { return region_; } else { return regionBuilder_.getMessage(); } } public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } region_ = value; onChanged(); } else { regionBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } public Builder setRegion( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { if (regionBuilder_ == null) { region_ = builderForValue.build(); onChanged(); } else { regionBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); } else { region_ = value; } onChanged(); } else { regionBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } public Builder clearRegion() { if (regionBuilder_ == null) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); onChanged(); } else { regionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { bitField0_ |= 0x00000001; onChanged(); return getRegionFieldBuilder().getBuilder(); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); } else { return region_; } } private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { if (regionBuilder_ == null) { regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( region_, getParentForChildren(), isClean()); region_ = null; } return regionBuilder_; } // optional bool force = 2 [default = false]; private boolean force_ ; public boolean hasForce() { return ((bitField0_ & 0x00000002) == 0x00000002); } public boolean getForce() { return force_; } public Builder setForce(boolean value) { bitField0_ |= 0x00000002; force_ = value; onChanged(); return this; } public Builder clearForce() { bitField0_ = (bitField0_ & ~0x00000002); force_ = false; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:UnassignRegionRequest) } static { defaultInstance = new UnassignRegionRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:UnassignRegionRequest) } public interface UnassignRegionResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } public static final class UnassignRegionResponse extends com.google.protobuf.GeneratedMessage implements UnassignRegionResponseOrBuilder { // Use UnassignRegionResponse.newBuilder() to construct. private UnassignRegionResponse(Builder builder) { super(builder); } private UnassignRegionResponse(boolean noInit) {} private static final UnassignRegionResponse defaultInstance; public static UnassignRegionResponse getDefaultInstance() { return defaultInstance; } public UnassignRegionResponse getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_UnassignRegionResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_UnassignRegionResponse_fieldAccessorTable; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_UnassignRegionResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_UnassignRegionResponse_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.getDescriptor(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); 
while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } } } } // @@protoc_insertion_point(builder_scope:UnassignRegionResponse) } static { defaultInstance = new UnassignRegionResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:UnassignRegionResponse) } public interface OfflineRegionRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .RegionSpecifier region = 1; boolean hasRegion(); org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); } public static final class OfflineRegionRequest extends com.google.protobuf.GeneratedMessage implements OfflineRegionRequestOrBuilder { // Use OfflineRegionRequest.newBuilder() to construct. private OfflineRegionRequest(Builder builder) { super(builder); } private OfflineRegionRequest(boolean noInit) {} private static final OfflineRegionRequest defaultInstance; public static OfflineRegionRequest getDefaultInstance() { return defaultInstance; } public OfflineRegionRequest getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_OfflineRegionRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_OfflineRegionRequest_fieldAccessorTable; } private int bitField0_; // required .RegionSpecifier region = 1; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { return region_; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { return region_; } private void initFields() { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasRegion()) { memoizedIsInitialized = 0; return false; } if (!getRegion().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, region_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, region_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected 
java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest) obj; boolean result = true; result = result && (hasRegion() == other.hasRegion()); if (hasRegion()) { result = result && getRegion() .equals(other.getRegion()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRegion()) { hash = (37 * hash) + REGION_FIELD_NUMBER; hash = (53 * hash) + getRegion().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parseFrom( com.google.protobuf.CodedInputStream input) throws 
java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_OfflineRegionRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_OfflineRegionRequest_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getRegionFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (regionBuilder_ == null) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); } else { regionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest.getDescriptor(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (regionBuilder_ == null) { result.region_ = region_; } else { result.region_ = regionBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest.getDefaultInstance()) return this; if (other.hasRegion()) { mergeRegion(other.getRegion()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasRegion()) { return false; } if (!getRegion().isInitialized()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } case 10: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); if (hasRegion()) { subBuilder.mergeFrom(getRegion()); } input.readMessage(subBuilder, extensionRegistry); setRegion(subBuilder.buildPartial()); break; } } } } private int bitField0_; // required .RegionSpecifier region = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { return region_; } else { return regionBuilder_.getMessage(); } } public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } region_ = value; onChanged(); } else { regionBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } public Builder setRegion( 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { if (regionBuilder_ == null) { region_ = builderForValue.build(); onChanged(); } else { regionBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); } else { region_ = value; } onChanged(); } else { regionBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } public Builder clearRegion() { if (regionBuilder_ == null) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); onChanged(); } else { regionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { bitField0_ |= 0x00000001; onChanged(); return getRegionFieldBuilder().getBuilder(); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); } else { return region_; } } private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { if (regionBuilder_ == null) { regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( region_, getParentForChildren(), isClean()); region_ = null; } return regionBuilder_; } // @@protoc_insertion_point(builder_scope:OfflineRegionRequest) } static { defaultInstance = new OfflineRegionRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:OfflineRegionRequest) } public interface OfflineRegionResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } public static final class OfflineRegionResponse extends com.google.protobuf.GeneratedMessage implements OfflineRegionResponseOrBuilder { // Use OfflineRegionResponse.newBuilder() to construct. 
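// Editor's note (not emitted by protoc): a minimal usage sketch for the offlineRegion
// request/response pair defined around this point. It assumes a RegionSpecifier named
// `regionSpec` has been built elsewhere; only builder/parse methods generated in this
// file plus standard protobuf runtime methods (toByteArray) are used.
//
//   OfflineRegionRequest request = OfflineRegionRequest.newBuilder()
//       .setRegion(regionSpec)            // required field; build() fails if it is unset
//       .build();
//   byte[] wire = request.toByteArray();  // serialize for the master RPC layer
//   OfflineRegionRequest echoed = OfflineRegionRequest.parseFrom(wire);
//   // OfflineRegionResponse carries no fields; getDefaultInstance() is the usual reply.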
private OfflineRegionResponse(Builder builder) { super(builder); } private OfflineRegionResponse(boolean noInit) {} private static final OfflineRegionResponse defaultInstance; public static OfflineRegionResponse getDefaultInstance() { return defaultInstance; } public OfflineRegionResponse getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_OfflineRegionResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_OfflineRegionResponse_fieldAccessorTable; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_OfflineRegionResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_OfflineRegionResponse_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.getDescriptor(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } } } } // @@protoc_insertion_point(builder_scope:OfflineRegionResponse) } static { defaultInstance = new OfflineRegionResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:OfflineRegionResponse) } public interface CreateTableRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .TableSchema tableSchema = 1; boolean hasTableSchema(); org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema(); org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder(); // repeated bytes splitKeys = 2; java.util.List<com.google.protobuf.ByteString> getSplitKeysList(); int getSplitKeysCount(); com.google.protobuf.ByteString getSplitKeys(int index); } public static final class CreateTableRequest extends com.google.protobuf.GeneratedMessage implements CreateTableRequestOrBuilder { 
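// Editor's note (not emitted by protoc): a hedged construction sketch for this message.
// `schema` stands for a hypothetical, fully built HBaseProtos.TableSchema, and
// ByteString is com.google.protobuf.ByteString; only builder methods generated below
// are used.
//
//   CreateTableRequest request = CreateTableRequest.newBuilder()
//       .setTableSchema(schema)                                  // required .TableSchema
//       .addSplitKeys(ByteString.copyFromUtf8("region-split"))   // repeated bytes, optional
//       .build();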
// Use CreateTableRequest.newBuilder() to construct. private CreateTableRequest(Builder builder) { super(builder); } private CreateTableRequest(boolean noInit) {} private static final CreateTableRequest defaultInstance; public static CreateTableRequest getDefaultInstance() { return defaultInstance; } public CreateTableRequest getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CreateTableRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CreateTableRequest_fieldAccessorTable; } private int bitField0_; // required .TableSchema tableSchema = 1; public static final int TABLESCHEMA_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema tableSchema_; public boolean hasTableSchema() { return ((bitField0_ & 0x00000001) == 0x00000001); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema() { return tableSchema_; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder() { return tableSchema_; } // repeated bytes splitKeys = 2; public static final int SPLITKEYS_FIELD_NUMBER = 2; private java.util.List<com.google.protobuf.ByteString> splitKeys_; public java.util.List<com.google.protobuf.ByteString> getSplitKeysList() { return splitKeys_; } public int getSplitKeysCount() { return splitKeys_.size(); } public com.google.protobuf.ByteString getSplitKeys(int index) { return splitKeys_.get(index); } private void initFields() { tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); splitKeys_ = java.util.Collections.emptyList();; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasTableSchema()) { memoizedIsInitialized = 0; return false; } if (!getTableSchema().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, tableSchema_); } for (int i = 0; i < splitKeys_.size(); i++) { output.writeBytes(2, splitKeys_.get(i)); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, tableSchema_); } { int dataSize = 0; for (int i = 0; i < splitKeys_.size(); i++) { dataSize += com.google.protobuf.CodedOutputStream .computeBytesSizeNoTag(splitKeys_.get(i)); } size += dataSize; size += 1 * getSplitKeysList().size(); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest) obj; boolean result = true; result = result && (hasTableSchema() == other.hasTableSchema()); if (hasTableSchema()) { result = result && getTableSchema() .equals(other.getTableSchema()); } result = result && getSplitKeysList() .equals(other.getSplitKeysList()); result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasTableSchema()) { hash = (37 * hash) + TABLESCHEMA_FIELD_NUMBER; hash = (53 * hash) + getTableSchema().hashCode(); } if (getSplitKeysCount() > 0) { hash = (37 * hash) + SPLITKEYS_FIELD_NUMBER; hash = (53 * hash) + getSplitKeysList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parseFrom( com.google.protobuf.CodedInputStream input) throws 
java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CreateTableRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CreateTableRequest_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getTableSchemaFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (tableSchemaBuilder_ == null) { tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); } else { tableSchemaBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); splitKeys_ = java.util.Collections.emptyList();; bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest.getDescriptor(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } 
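// Editor's note (not emitted by protoc): build() rejects a message whose required fields
// are unset by throwing the runtime's UninitializedMessageException, while buildParsed()
// above converts that failure into an InvalidProtocolBufferException so the parseFrom(...)
// entry points surface a checked parsing error instead. buildPartial() below performs no
// required-field validation and is what the merge and clone paths rely on.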
public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (tableSchemaBuilder_ == null) { result.tableSchema_ = tableSchema_; } else { result.tableSchema_ = tableSchemaBuilder_.build(); } if (((bitField0_ & 0x00000002) == 0x00000002)) { splitKeys_ = java.util.Collections.unmodifiableList(splitKeys_); bitField0_ = (bitField0_ & ~0x00000002); } result.splitKeys_ = splitKeys_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest.getDefaultInstance()) return this; if (other.hasTableSchema()) { mergeTableSchema(other.getTableSchema()); } if (!other.splitKeys_.isEmpty()) { if (splitKeys_.isEmpty()) { splitKeys_ = other.splitKeys_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureSplitKeysIsMutable(); splitKeys_.addAll(other.splitKeys_); } onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasTableSchema()) { return false; } if (!getTableSchema().isInitialized()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } case 10: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.newBuilder(); if (hasTableSchema()) { subBuilder.mergeFrom(getTableSchema()); } input.readMessage(subBuilder, extensionRegistry); setTableSchema(subBuilder.buildPartial()); break; } case 18: { ensureSplitKeysIsMutable(); splitKeys_.add(input.readBytes()); break; } } } } private int bitField0_; // required .TableSchema tableSchema = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> tableSchemaBuilder_; public boolean hasTableSchema() { return ((bitField0_ & 0x00000001) == 0x00000001); } public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema() { if (tableSchemaBuilder_ == null) { return tableSchema_; } else { return tableSchemaBuilder_.getMessage(); } } public Builder setTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) { if (tableSchemaBuilder_ == null) { if (value == null) { throw new NullPointerException(); } tableSchema_ = value; onChanged(); } else { tableSchemaBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } public Builder setTableSchema( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) { if (tableSchemaBuilder_ == null) { tableSchema_ = builderForValue.build(); onChanged(); } else { tableSchemaBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } public Builder mergeTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) { if (tableSchemaBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && tableSchema_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()) { tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.newBuilder(tableSchema_).mergeFrom(value).buildPartial(); } else { tableSchema_ = value; } onChanged(); } else { tableSchemaBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } public Builder clearTableSchema() { if (tableSchemaBuilder_ == null) { tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); onChanged(); } else { tableSchemaBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder getTableSchemaBuilder() { bitField0_ |= 0x00000001; onChanged(); return getTableSchemaFieldBuilder().getBuilder(); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder() { if (tableSchemaBuilder_ != null) { return tableSchemaBuilder_.getMessageOrBuilder(); } else { return tableSchema_; } } private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> getTableSchemaFieldBuilder() { if (tableSchemaBuilder_ == null) { tableSchemaBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>( tableSchema_, getParentForChildren(), isClean()); tableSchema_ = null; } return tableSchemaBuilder_; } // repeated bytes splitKeys = 2; private java.util.List<com.google.protobuf.ByteString> splitKeys_ = java.util.Collections.emptyList();; private void ensureSplitKeysIsMutable() { if (!((bitField0_ & 0x00000002) == 0x00000002)) { splitKeys_ = new java.util.ArrayList<com.google.protobuf.ByteString>(splitKeys_); bitField0_ |= 0x00000002; } } public java.util.List<com.google.protobuf.ByteString> getSplitKeysList() { return java.util.Collections.unmodifiableList(splitKeys_); } public int getSplitKeysCount() { return splitKeys_.size(); } public com.google.protobuf.ByteString getSplitKeys(int index) { return splitKeys_.get(index); } public Builder setSplitKeys( int index, 
com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } ensureSplitKeysIsMutable(); splitKeys_.set(index, value); onChanged(); return this; } public Builder addSplitKeys(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } ensureSplitKeysIsMutable(); splitKeys_.add(value); onChanged(); return this; } public Builder addAllSplitKeys( java.lang.Iterable<? extends com.google.protobuf.ByteString> values) { ensureSplitKeysIsMutable(); super.addAll(values, splitKeys_); onChanged(); return this; } public Builder clearSplitKeys() { splitKeys_ = java.util.Collections.emptyList();; bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } // @@protoc_insertion_point(builder_scope:CreateTableRequest) } static { defaultInstance = new CreateTableRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:CreateTableRequest) } public interface CreateTableResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } public static final class CreateTableResponse extends com.google.protobuf.GeneratedMessage implements CreateTableResponseOrBuilder { // Use CreateTableResponse.newBuilder() to construct. private CreateTableResponse(Builder builder) { super(builder); } private CreateTableResponse(boolean noInit) {} private static final CreateTableResponse defaultInstance; public static CreateTableResponse getDefaultInstance() { return defaultInstance; } public CreateTableResponse getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CreateTableResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CreateTableResponse_fieldAccessorTable; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CreateTableResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CreateTableResponse_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.getDescriptor(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { 
case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } } } } // @@protoc_insertion_point(builder_scope:CreateTableResponse) } static { defaultInstance = new CreateTableResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:CreateTableResponse) } public interface DeleteTableRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required bytes tableName = 1; boolean hasTableName(); com.google.protobuf.ByteString getTableName(); } public static final class DeleteTableRequest extends com.google.protobuf.GeneratedMessage implements DeleteTableRequestOrBuilder { // Use DeleteTableRequest.newBuilder() to construct. private DeleteTableRequest(Builder builder) { super(builder); } private DeleteTableRequest(boolean noInit) {} private static final DeleteTableRequest defaultInstance; public static DeleteTableRequest getDefaultInstance() { return defaultInstance; } public DeleteTableRequest getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteTableRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteTableRequest_fieldAccessorTable; } private int bitField0_; // required bytes tableName = 1; public static final int TABLENAME_FIELD_NUMBER = 1; private com.google.protobuf.ByteString tableName_; public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } public com.google.protobuf.ByteString getTableName() { return tableName_; } private void initFields() { tableName_ = com.google.protobuf.ByteString.EMPTY; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasTableName()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, tableName_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, tableName_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest) obj; boolean result = true; result = result && 
(hasTableName() == other.hasTableName()); if (hasTableName()) { result = result && getTableName() .equals(other.getTableName()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasTableName()) { hash = (37 * hash) + TABLENAME_FIELD_NUMBER; hash = (53 * hash) + getTableName().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteTableRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteTableRequest_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); tableName_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest.getDescriptor(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.tableName_ = tableName_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest) { return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest.getDefaultInstance()) return this; if (other.hasTableName()) { setTableName(other.getTableName()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasTableName()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } case 10: { bitField0_ |= 0x00000001; tableName_ = input.readBytes(); break; } } } } private int bitField0_; // required bytes tableName = 1; private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } public com.google.protobuf.ByteString getTableName() { return tableName_; } public Builder setTableName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; tableName_ = value; onChanged(); return this; } public Builder clearTableName() { bitField0_ = (bitField0_ & ~0x00000001); tableName_ = getDefaultInstance().getTableName(); onChanged(); return this; } // @@protoc_insertion_point(builder_scope:DeleteTableRequest) } static { defaultInstance = new DeleteTableRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:DeleteTableRequest) } public interface DeleteTableResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } public static final class DeleteTableResponse extends com.google.protobuf.GeneratedMessage implements DeleteTableResponseOrBuilder { // Use DeleteTableResponse.newBuilder() to construct. 
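// Editor's note (not emitted by protoc): a minimal sketch of the deleteTable request,
// whose only field is the required tableName bytes. The table name literal is
// illustrative; ByteString is com.google.protobuf.ByteString.
//
//   DeleteTableRequest request = DeleteTableRequest.newBuilder()
//       .setTableName(ByteString.copyFromUtf8("my_table"))   // required bytes tableName
//       .build();
//   byte[] wire = request.toByteArray();
//   DeleteTableRequest parsed = DeleteTableRequest.parseFrom(wire);   // round-trip check
//   // DeleteTableResponse has no fields; getDefaultInstance() is the usual reply value.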
private DeleteTableResponse(Builder builder) { super(builder); } private DeleteTableResponse(boolean noInit) {} private static final DeleteTableResponse defaultInstance; public static DeleteTableResponse getDefaultInstance() { return defaultInstance; } public DeleteTableResponse getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteTableResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteTableResponse_fieldAccessorTable; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse 
parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteTableResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DeleteTableResponse_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.getDescriptor(); } public 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } } } } // @@protoc_insertion_point(builder_scope:DeleteTableResponse) } static { defaultInstance = new DeleteTableResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:DeleteTableResponse) } public interface EnableTableRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required bytes tableName = 1; boolean hasTableName(); com.google.protobuf.ByteString getTableName(); } public static final class EnableTableRequest extends com.google.protobuf.GeneratedMessage implements EnableTableRequestOrBuilder { // Use EnableTableRequest.newBuilder() to construct. 
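  // Illustrative sketch (not protoc output): DeleteTableResponse carries no fields, so its
  // delimited stream form is just a varint length prefix followed by an empty payload.
  // writeDelimitedTo and the generated parseDelimitedFrom pair up for streaming use.
  private static DeleteTableResponse exampleDelimitedDeleteTableResponse()
      throws java.io.IOException {
    java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
    DeleteTableResponse.getDefaultInstance().writeDelimitedTo(out); // length prefix + empty body
    java.io.ByteArrayInputStream in = new java.io.ByteArrayInputStream(out.toByteArray());
    return DeleteTableResponse.parseDelimitedFrom(in); // returns null only at end of stream
  }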
private EnableTableRequest(Builder builder) { super(builder); } private EnableTableRequest(boolean noInit) {} private static final EnableTableRequest defaultInstance; public static EnableTableRequest getDefaultInstance() { return defaultInstance; } public EnableTableRequest getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableTableRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableTableRequest_fieldAccessorTable; } private int bitField0_; // required bytes tableName = 1; public static final int TABLENAME_FIELD_NUMBER = 1; private com.google.protobuf.ByteString tableName_; public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } public com.google.protobuf.ByteString getTableName() { return tableName_; } private void initFields() { tableName_ = com.google.protobuf.ByteString.EMPTY; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasTableName()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, tableName_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, tableName_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest) obj; boolean result = true; result = result && (hasTableName() == other.hasTableName()); if (hasTableName()) { result = result && getTableName() .equals(other.getTableName()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasTableName()) { hash = (37 * hash) + TABLENAME_FIELD_NUMBER; hash = (53 * hash) + getTableName().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableTableRequest_descriptor; } protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableTableRequest_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); tableName_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest.getDescriptor(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.tableName_ = tableName_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest.getDefaultInstance()) return this; if (other.hasTableName()) { setTableName(other.getTableName()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasTableName()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = 
com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } case 10: { bitField0_ |= 0x00000001; tableName_ = input.readBytes(); break; } } } } private int bitField0_; // required bytes tableName = 1; private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } public com.google.protobuf.ByteString getTableName() { return tableName_; } public Builder setTableName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; tableName_ = value; onChanged(); return this; } public Builder clearTableName() { bitField0_ = (bitField0_ & ~0x00000001); tableName_ = getDefaultInstance().getTableName(); onChanged(); return this; } // @@protoc_insertion_point(builder_scope:EnableTableRequest) } static { defaultInstance = new EnableTableRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:EnableTableRequest) } public interface EnableTableResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } public static final class EnableTableResponse extends com.google.protobuf.GeneratedMessage implements EnableTableResponseOrBuilder { // Use EnableTableResponse.newBuilder() to construct. private EnableTableResponse(Builder builder) { super(builder); } private EnableTableResponse(boolean noInit) {} private static final EnableTableResponse defaultInstance; public static EnableTableResponse getDefaultInstance() { return defaultInstance; } public EnableTableResponse getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableTableResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableTableResponse_fieldAccessorTable; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse other = 
(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse prototype) { return newBuilder().mergeFrom(prototype); } 
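  // Illustrative sketch (not protoc output): newBuilder(prototype) and toBuilder() both seed a
  // Builder from an existing EnableTableRequest, which is the usual way to derive a modified
  // copy of an immutable message. The table name "renamedTable" is an example value.
  private static EnableTableRequest exampleCopyWithNewName(EnableTableRequest original) {
    return EnableTableRequest.newBuilder(original) // equivalent to original.toBuilder()
        .setTableName(com.google.protobuf.ByteString.copyFromUtf8("renamedTable"))
        .build();
  }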
public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableTableResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableTableResponse_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.getDescriptor(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } } } } // @@protoc_insertion_point(builder_scope:EnableTableResponse) } static { defaultInstance = new EnableTableResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:EnableTableResponse) } public interface DisableTableRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required bytes tableName = 1; boolean hasTableName(); com.google.protobuf.ByteString getTableName(); } public static final class DisableTableRequest extends com.google.protobuf.GeneratedMessage implements DisableTableRequestOrBuilder { // Use DisableTableRequest.newBuilder() to construct. private DisableTableRequest(Builder builder) { super(builder); } private DisableTableRequest(boolean noInit) {} private static final DisableTableRequest defaultInstance; public static DisableTableRequest getDefaultInstance() { return defaultInstance; } public DisableTableRequest getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DisableTableRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DisableTableRequest_fieldAccessorTable; } private int bitField0_; // required bytes tableName = 1; public static final int TABLENAME_FIELD_NUMBER = 1; private com.google.protobuf.ByteString tableName_; public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } public com.google.protobuf.ByteString getTableName() { return tableName_; } private void initFields() { tableName_ = com.google.protobuf.ByteString.EMPTY; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasTableName()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, tableName_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, tableName_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj 
instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest) obj; boolean result = true; result = result && (hasTableName() == other.hasTableName()); if (hasTableName()) { result = result && getTableName() .equals(other.getTableName()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasTableName()) { hash = (37 * hash) + TABLENAME_FIELD_NUMBER; hash = (53 * hash) + getTableName().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest parseFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DisableTableRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DisableTableRequest_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); tableName_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest.getDescriptor(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 
0x00000001; } result.tableName_ = tableName_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest.getDefaultInstance()) return this; if (other.hasTableName()) { setTableName(other.getTableName()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasTableName()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } case 10: { bitField0_ |= 0x00000001; tableName_ = input.readBytes(); break; } } } } private int bitField0_; // required bytes tableName = 1; private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } public com.google.protobuf.ByteString getTableName() { return tableName_; } public Builder setTableName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; tableName_ = value; onChanged(); return this; } public Builder clearTableName() { bitField0_ = (bitField0_ & ~0x00000001); tableName_ = getDefaultInstance().getTableName(); onChanged(); return this; } // @@protoc_insertion_point(builder_scope:DisableTableRequest) } static { defaultInstance = new DisableTableRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:DisableTableRequest) } public interface DisableTableResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } public static final class DisableTableResponse extends com.google.protobuf.GeneratedMessage implements DisableTableResponseOrBuilder { // Use DisableTableResponse.newBuilder() to construct. 
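  // Illustrative sketch (not protoc output): Builder.mergeFrom(other) copies only the fields
  // that are set on 'other', so merging a request whose tableName is set overwrites the base
  // builder's value, as implemented by the generated mergeFrom above.
  private static DisableTableRequest exampleMergeDisableTableRequests(
      DisableTableRequest base, DisableTableRequest overlay) {
    DisableTableRequest.Builder builder = base.toBuilder();
    builder.mergeFrom(overlay); // overlay.hasTableName() decides whether tableName changes
    return builder.buildPartial(); // buildPartial() does not throw if tableName is still unset
  }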
private DisableTableResponse(Builder builder) { super(builder); } private DisableTableResponse(boolean noInit) {} private static final DisableTableResponse defaultInstance; public static DisableTableResponse getDefaultInstance() { return defaultInstance; } public DisableTableResponse getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DisableTableResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DisableTableResponse_fieldAccessorTable; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse 
parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DisableTableResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_DisableTableResponse_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.getDescriptor(); } 
public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } } } } // @@protoc_insertion_point(builder_scope:DisableTableResponse) } static { defaultInstance = new DisableTableResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:DisableTableResponse) } public interface ModifyTableRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required bytes tableName = 1; boolean hasTableName(); com.google.protobuf.ByteString getTableName(); // required .TableSchema tableSchema = 2; boolean hasTableSchema(); org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema(); org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder(); } public static final class ModifyTableRequest extends com.google.protobuf.GeneratedMessage implements ModifyTableRequestOrBuilder { // Use ModifyTableRequest.newBuilder() to construct. 
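  // Illustrative sketch (not protoc output): ModifyTableRequest declares both tableName and
  // tableSchema as required, so a message built from an empty Builder is not initialized;
  // build() would throw, while buildPartial() returns the incomplete message for inspection.
  private static boolean exampleModifyTableRequestInitialized() {
    ModifyTableRequest partial = ModifyTableRequest.newBuilder().buildPartial();
    return partial.isInitialized(); // false: the required fields are unset
  }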
private ModifyTableRequest(Builder builder) { super(builder); } private ModifyTableRequest(boolean noInit) {} private static final ModifyTableRequest defaultInstance; public static ModifyTableRequest getDefaultInstance() { return defaultInstance; } public ModifyTableRequest getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyTableRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyTableRequest_fieldAccessorTable; } private int bitField0_; // required bytes tableName = 1; public static final int TABLENAME_FIELD_NUMBER = 1; private com.google.protobuf.ByteString tableName_; public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } public com.google.protobuf.ByteString getTableName() { return tableName_; } // required .TableSchema tableSchema = 2; public static final int TABLESCHEMA_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema tableSchema_; public boolean hasTableSchema() { return ((bitField0_ & 0x00000002) == 0x00000002); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema() { return tableSchema_; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder() { return tableSchema_; } private void initFields() { tableName_ = com.google.protobuf.ByteString.EMPTY; tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasTableName()) { memoizedIsInitialized = 0; return false; } if (!hasTableSchema()) { memoizedIsInitialized = 0; return false; } if (!getTableSchema().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, tableName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeMessage(2, tableSchema_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, tableName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, tableSchema_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest 
other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest) obj; boolean result = true; result = result && (hasTableName() == other.hasTableName()); if (hasTableName()) { result = result && getTableName() .equals(other.getTableName()); } result = result && (hasTableSchema() == other.hasTableSchema()); if (hasTableSchema()) { result = result && getTableSchema() .equals(other.getTableSchema()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasTableName()) { hash = (37 * hash) + TABLENAME_FIELD_NUMBER; hash = (53 * hash) + getTableName().hashCode(); } if (hasTableSchema()) { hash = (37 * hash) + TABLESCHEMA_FIELD_NUMBER; hash = (53 * hash) + getTableSchema().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyTableRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyTableRequest_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getTableSchemaFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); tableName_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); if (tableSchemaBuilder_ == null) { tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); } else { tableSchemaBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest.getDescriptor(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest 
buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.tableName_ = tableName_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } if (tableSchemaBuilder_ == null) { result.tableSchema_ = tableSchema_; } else { result.tableSchema_ = tableSchemaBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest.getDefaultInstance()) return this; if (other.hasTableName()) { setTableName(other.getTableName()); } if (other.hasTableSchema()) { mergeTableSchema(other.getTableSchema()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasTableName()) { return false; } if (!hasTableSchema()) { return false; } if (!getTableSchema().isInitialized()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } case 10: { bitField0_ |= 0x00000001; tableName_ = input.readBytes(); break; } case 18: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.newBuilder(); if (hasTableSchema()) { subBuilder.mergeFrom(getTableSchema()); } input.readMessage(subBuilder, extensionRegistry); setTableSchema(subBuilder.buildPartial()); break; } } } } private int bitField0_; // required bytes tableName = 1; private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } public com.google.protobuf.ByteString getTableName() { return tableName_; } public Builder setTableName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; tableName_ = value; onChanged(); return this; } public Builder clearTableName() { bitField0_ = (bitField0_ & ~0x00000001); tableName_ = getDefaultInstance().getTableName(); onChanged(); return this; } // required .TableSchema tableSchema = 2; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); private 
com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> tableSchemaBuilder_; public boolean hasTableSchema() { return ((bitField0_ & 0x00000002) == 0x00000002); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema() { if (tableSchemaBuilder_ == null) { return tableSchema_; } else { return tableSchemaBuilder_.getMessage(); } } public Builder setTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) { if (tableSchemaBuilder_ == null) { if (value == null) { throw new NullPointerException(); } tableSchema_ = value; onChanged(); } else { tableSchemaBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } public Builder setTableSchema( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) { if (tableSchemaBuilder_ == null) { tableSchema_ = builderForValue.build(); onChanged(); } else { tableSchemaBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } public Builder mergeTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) { if (tableSchemaBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && tableSchema_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()) { tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.newBuilder(tableSchema_).mergeFrom(value).buildPartial(); } else { tableSchema_ = value; } onChanged(); } else { tableSchemaBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } public Builder clearTableSchema() { if (tableSchemaBuilder_ == null) { tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); onChanged(); } else { tableSchemaBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder getTableSchemaBuilder() { bitField0_ |= 0x00000002; onChanged(); return getTableSchemaFieldBuilder().getBuilder(); } public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder() { if (tableSchemaBuilder_ != null) { return tableSchemaBuilder_.getMessageOrBuilder(); } else { return tableSchema_; } } private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> getTableSchemaFieldBuilder() { if (tableSchemaBuilder_ == null) { tableSchemaBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>( tableSchema_, getParentForChildren(), isClean()); tableSchema_ = null; } return tableSchemaBuilder_; } // @@protoc_insertion_point(builder_scope:ModifyTableRequest) } static { defaultInstance = new ModifyTableRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:ModifyTableRequest) } public interface ModifyTableResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } public static 
final class ModifyTableResponse extends com.google.protobuf.GeneratedMessage implements ModifyTableResponseOrBuilder { // Use ModifyTableResponse.newBuilder() to construct. private ModifyTableResponse(Builder builder) { super(builder); } private ModifyTableResponse(boolean noInit) {} private static final ModifyTableResponse defaultInstance; public static ModifyTableResponse getDefaultInstance() { return defaultInstance; } public ModifyTableResponse getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyTableResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyTableResponse_fieldAccessorTable; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return 
newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyTableResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyTableResponse_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public 
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.getDescriptor(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } } } } // @@protoc_insertion_point(builder_scope:ModifyTableResponse) } static { defaultInstance = new ModifyTableResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:ModifyTableResponse) } public interface ShutdownRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { } public static final class ShutdownRequest extends com.google.protobuf.GeneratedMessage implements ShutdownRequestOrBuilder { // Use ShutdownRequest.newBuilder() to construct. 
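// Usage sketch: ShutdownRequestOrBuilder declares no fields, so a freshly built message
// carries the same (empty) wire content as the default instance:
//
//   ShutdownRequest request = ShutdownRequest.newBuilder().build();
//   byte[] wire = request.toByteArray();              // empty apart from any unknown fields
//   ShutdownRequest parsed = ShutdownRequest.parseFrom(wire);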
private ShutdownRequest(Builder builder) { super(builder); } private ShutdownRequest(boolean noInit) {} private static final ShutdownRequest defaultInstance; public static ShutdownRequest getDefaultInstance() { return defaultInstance; } public ShutdownRequest getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ShutdownRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ShutdownRequest_fieldAccessorTable; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return 
newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ShutdownRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ShutdownRequest_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest.getDescriptor(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest getDefaultInstanceForType() { return 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } } } } // @@protoc_insertion_point(builder_scope:ShutdownRequest) } static { defaultInstance = new ShutdownRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:ShutdownRequest) } public interface ShutdownResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } public static final class ShutdownResponse extends com.google.protobuf.GeneratedMessage implements ShutdownResponseOrBuilder { // Use ShutdownResponse.newBuilder() to construct. 
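// Usage sketch: since ShutdownResponse also carries no fields, callers typically only
// check that a response was read at all; parseDelimitedFrom returns null once the
// stream is exhausted ("in" is an assumed java.io.InputStream):
//
//   ShutdownResponse response = ShutdownResponse.parseDelimitedFrom(in);
//   if (response == null) {
//     // end of stream reached before a delimited message could be read
//   }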
private ShutdownResponse(Builder builder) { super(builder); } private ShutdownResponse(boolean noInit) {} private static final ShutdownResponse defaultInstance; public static ShutdownResponse getDefaultInstance() { return defaultInstance; } public ShutdownResponse getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ShutdownResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ShutdownResponse_fieldAccessorTable; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parseFrom(java.io.InputStream input) throws java.io.IOException { 
return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ShutdownResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ShutdownResponse_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.getDescriptor(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse getDefaultInstanceForType() 
{ return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } } } } // @@protoc_insertion_point(builder_scope:ShutdownResponse) } static { defaultInstance = new ShutdownResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:ShutdownResponse) } public interface StopMasterRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { } public static final class StopMasterRequest extends com.google.protobuf.GeneratedMessage implements StopMasterRequestOrBuilder { // Use StopMasterRequest.newBuilder() to construct. 
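// Usage sketch of length-delimited framing, which lets several messages share one
// stream ("out" and "in" are assumed java.io.OutputStream/InputStream; writeDelimitedTo
// comes from the protobuf MessageLite base class):
//
//   StopMasterRequest.newBuilder().build().writeDelimitedTo(out);
//   StopMasterRequest request = StopMasterRequest.parseDelimitedFrom(in);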
private StopMasterRequest(Builder builder) { super(builder); } private StopMasterRequest(boolean noInit) {} private static final StopMasterRequest defaultInstance; public static StopMasterRequest getDefaultInstance() { return defaultInstance; } public StopMasterRequest getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_StopMasterRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_StopMasterRequest_fieldAccessorTable; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parseFrom(java.io.InputStream input) throws 
java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_StopMasterRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_StopMasterRequest_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest.getDescriptor(); } public 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } } } } // @@protoc_insertion_point(builder_scope:StopMasterRequest) } static { defaultInstance = new StopMasterRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:StopMasterRequest) } public interface StopMasterResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { } public static final class StopMasterResponse extends com.google.protobuf.GeneratedMessage implements StopMasterResponseOrBuilder { // Use StopMasterResponse.newBuilder() to construct. 
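// Usage sketch: the registry-taking overloads work even though this message defines no
// extensions; an empty registry behaves like the plain parseFrom ("data" is an assumed
// byte[]):
//
//   StopMasterResponse response = StopMasterResponse.parseFrom(
//       data, com.google.protobuf.ExtensionRegistryLite.getEmptyInstance());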
private StopMasterResponse(Builder builder) { super(builder); } private StopMasterResponse(boolean noInit) {} private static final StopMasterResponse defaultInstance; public static StopMasterResponse getDefaultInstance() { return defaultInstance; } public StopMasterResponse getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_StopMasterResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_StopMasterResponse_fieldAccessorTable; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parseFrom(java.io.InputStream input) 
throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_StopMasterResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_StopMasterResponse_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.getDescriptor(); } public 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } } } } // @@protoc_insertion_point(builder_scope:StopMasterResponse) } static { defaultInstance = new StopMasterResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:StopMasterResponse) } public interface BalanceRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { } public static final class BalanceRequest extends com.google.protobuf.GeneratedMessage implements BalanceRequestOrBuilder { // Use BalanceRequest.newBuilder() to construct. 
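// Editor's illustrative sketch (not emitted by protoc): BalanceRequest carries
// no fields, so the shared default instance is the idiomatic request value;
// newBuilder().build() would produce an equal but freshly allocated message.
// The helper name exampleRequest is an assumption for illustration only.
private static BalanceRequest exampleRequest() {
  // Reusing the singleton default instance avoids an unnecessary allocation
  // for a message with no settable fields.
  return BalanceRequest.getDefaultInstance();
}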
private BalanceRequest(Builder builder) { super(builder); } private BalanceRequest(boolean noInit) {} private static final BalanceRequest defaultInstance; public static BalanceRequest getDefaultInstance() { return defaultInstance; } public BalanceRequest getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_BalanceRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_BalanceRequest_fieldAccessorTable; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return 
newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_BalanceRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_BalanceRequest_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest.getDescriptor(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest getDefaultInstanceForType() { return 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } } } } // @@protoc_insertion_point(builder_scope:BalanceRequest) } static { defaultInstance = new BalanceRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:BalanceRequest) } public interface BalanceResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { // required bool balancerRan = 1; boolean hasBalancerRan(); boolean getBalancerRan(); } public static final class BalanceResponse extends com.google.protobuf.GeneratedMessage implements BalanceResponseOrBuilder { // Use BalanceResponse.newBuilder() to construct. 
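// Editor's illustrative sketch (not emitted by protoc): the usual
// build/serialize/parse round trip for this message. Because balancerRan is a
// required field, build() throws an UninitializedMessageException if it was
// never set. The helper name exampleRoundTrip is an assumption for illustration only.
private static BalanceResponse exampleRoundTrip(boolean balancerRan)
    throws com.google.protobuf.InvalidProtocolBufferException {
  // newBuilder() -> setters -> build() is the standard construction path for
  // generated messages; the required field must be populated before build().
  BalanceResponse built = BalanceResponse.newBuilder()
      .setBalancerRan(balancerRan)
      .build();
  // toByteArray()/parseFrom(byte[]) are the standard GeneratedMessage
  // serialization entry points used by callers of this class.
  return BalanceResponse.parseFrom(built.toByteArray());
}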
private BalanceResponse(Builder builder) { super(builder); } private BalanceResponse(boolean noInit) {} private static final BalanceResponse defaultInstance; public static BalanceResponse getDefaultInstance() { return defaultInstance; } public BalanceResponse getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_BalanceResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_BalanceResponse_fieldAccessorTable; } private int bitField0_; // required bool balancerRan = 1; public static final int BALANCERRAN_FIELD_NUMBER = 1; private boolean balancerRan_; public boolean hasBalancerRan() { return ((bitField0_ & 0x00000001) == 0x00000001); } public boolean getBalancerRan() { return balancerRan_; } private void initFields() { balancerRan_ = false; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasBalancerRan()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, balancerRan_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(1, balancerRan_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse) obj; boolean result = true; result = result && (hasBalancerRan() == other.hasBalancerRan()); if (hasBalancerRan()) { result = result && (getBalancerRan() == other.getBalancerRan()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasBalancerRan()) { hash = (37 * hash) + BALANCERRAN_FIELD_NUMBER; hash = (53 * hash) + hashBoolean(getBalancerRan()); } hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parseFrom( com.google.protobuf.ByteString data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_BalanceResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_BalanceResponse_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); balancerRan_ = false; bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.getDescriptor(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.balancerRan_ = balancerRan_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.getDefaultInstance()) return this; if (other.hasBalancerRan()) { setBalancerRan(other.getBalancerRan()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasBalancerRan()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return 
this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } case 8: { bitField0_ |= 0x00000001; balancerRan_ = input.readBool(); break; } } } } private int bitField0_; // required bool balancerRan = 1; private boolean balancerRan_ ; public boolean hasBalancerRan() { return ((bitField0_ & 0x00000001) == 0x00000001); } public boolean getBalancerRan() { return balancerRan_; } public Builder setBalancerRan(boolean value) { bitField0_ |= 0x00000001; balancerRan_ = value; onChanged(); return this; } public Builder clearBalancerRan() { bitField0_ = (bitField0_ & ~0x00000001); balancerRan_ = false; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:BalanceResponse) } static { defaultInstance = new BalanceResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:BalanceResponse) } public interface SetBalancerRunningRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required bool on = 1; boolean hasOn(); boolean getOn(); // optional bool synchronous = 2; boolean hasSynchronous(); boolean getSynchronous(); } public static final class SetBalancerRunningRequest extends com.google.protobuf.GeneratedMessage implements SetBalancerRunningRequestOrBuilder { // Use SetBalancerRunningRequest.newBuilder() to construct. private SetBalancerRunningRequest(Builder builder) { super(builder); } private SetBalancerRunningRequest(boolean noInit) {} private static final SetBalancerRunningRequest defaultInstance; public static SetBalancerRunningRequest getDefaultInstance() { return defaultInstance; } public SetBalancerRunningRequest getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SetBalancerRunningRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SetBalancerRunningRequest_fieldAccessorTable; } private int bitField0_; // required bool on = 1; public static final int ON_FIELD_NUMBER = 1; private boolean on_; public boolean hasOn() { return ((bitField0_ & 0x00000001) == 0x00000001); } public boolean getOn() { return on_; } // optional bool synchronous = 2; public static final int SYNCHRONOUS_FIELD_NUMBER = 2; private boolean synchronous_; public boolean hasSynchronous() { return ((bitField0_ & 0x00000002) == 0x00000002); } public boolean getSynchronous() { return synchronous_; } private void initFields() { on_ = false; synchronous_ = false; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasOn()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, on_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBool(2, synchronous_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += 
com.google.protobuf.CodedOutputStream .computeBoolSize(1, on_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(2, synchronous_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest) obj; boolean result = true; result = result && (hasOn() == other.hasOn()); if (hasOn()) { result = result && (getOn() == other.getOn()); } result = result && (hasSynchronous() == other.hasSynchronous()); if (hasSynchronous()) { result = result && (getSynchronous() == other.getSynchronous()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasOn()) { hash = (37 * hash) + ON_FIELD_NUMBER; hash = (53 * hash) + hashBoolean(getOn()); } if (hasSynchronous()) { hash = (37 * hash) + SYNCHRONOUS_FIELD_NUMBER; hash = (53 * hash) + hashBoolean(getSynchronous()); } hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { 
Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SetBalancerRunningRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SetBalancerRunningRequest_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); on_ = false; bitField0_ = (bitField0_ & ~0x00000001); synchronous_ = false; bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest.getDescriptor(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest build() { 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.on_ = on_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.synchronous_ = synchronous_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest.getDefaultInstance()) return this; if (other.hasOn()) { setOn(other.getOn()); } if (other.hasSynchronous()) { setSynchronous(other.getSynchronous()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasOn()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } case 8: { bitField0_ |= 0x00000001; on_ = input.readBool(); break; } case 16: { bitField0_ |= 0x00000002; synchronous_ = input.readBool(); break; } } } } private int bitField0_; // required bool on = 1; private boolean on_ ; public boolean hasOn() { return ((bitField0_ & 0x00000001) == 0x00000001); } public boolean getOn() { return on_; } public Builder setOn(boolean value) { bitField0_ |= 0x00000001; on_ = value; onChanged(); return this; } public Builder clearOn() { bitField0_ = (bitField0_ & ~0x00000001); on_ = false; onChanged(); return this; } // optional bool synchronous = 2; private boolean synchronous_ ; public boolean hasSynchronous() { return ((bitField0_ & 0x00000002) == 0x00000002); } public boolean getSynchronous() { return synchronous_; } public Builder setSynchronous(boolean value) { bitField0_ 
|= 0x00000002; synchronous_ = value; onChanged(); return this; } public Builder clearSynchronous() { bitField0_ = (bitField0_ & ~0x00000002); synchronous_ = false; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:SetBalancerRunningRequest) } static { defaultInstance = new SetBalancerRunningRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:SetBalancerRunningRequest) } public interface SetBalancerRunningResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { // optional bool prevBalanceValue = 1; boolean hasPrevBalanceValue(); boolean getPrevBalanceValue(); } public static final class SetBalancerRunningResponse extends com.google.protobuf.GeneratedMessage implements SetBalancerRunningResponseOrBuilder { // Use SetBalancerRunningResponse.newBuilder() to construct. private SetBalancerRunningResponse(Builder builder) { super(builder); } private SetBalancerRunningResponse(boolean noInit) {} private static final SetBalancerRunningResponse defaultInstance; public static SetBalancerRunningResponse getDefaultInstance() { return defaultInstance; } public SetBalancerRunningResponse getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SetBalancerRunningResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SetBalancerRunningResponse_fieldAccessorTable; } private int bitField0_; // optional bool prevBalanceValue = 1; public static final int PREVBALANCEVALUE_FIELD_NUMBER = 1; private boolean prevBalanceValue_; public boolean hasPrevBalanceValue() { return ((bitField0_ & 0x00000001) == 0x00000001); } public boolean getPrevBalanceValue() { return prevBalanceValue_; } private void initFields() { prevBalanceValue_ = false; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, prevBalanceValue_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(1, prevBalanceValue_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse) obj; boolean result = true; result = result && (hasPrevBalanceValue() == 
other.hasPrevBalanceValue()); if (hasPrevBalanceValue()) { result = result && (getPrevBalanceValue() == other.getPrevBalanceValue()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasPrevBalanceValue()) { hash = (37 * hash) + PREVBALANCEVALUE_FIELD_NUMBER; hash = (53 * hash) + hashBoolean(getPrevBalanceValue()); } hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); 
} public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SetBalancerRunningResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SetBalancerRunningResponse_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); prevBalanceValue_ = false; bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.getDescriptor(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.prevBalanceValue_ = prevBalanceValue_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other 
instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.getDefaultInstance()) return this; if (other.hasPrevBalanceValue()) { setPrevBalanceValue(other.getPrevBalanceValue()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } case 8: { bitField0_ |= 0x00000001; prevBalanceValue_ = input.readBool(); break; } } } } private int bitField0_; // optional bool prevBalanceValue = 1; private boolean prevBalanceValue_ ; public boolean hasPrevBalanceValue() { return ((bitField0_ & 0x00000001) == 0x00000001); } public boolean getPrevBalanceValue() { return prevBalanceValue_; } public Builder setPrevBalanceValue(boolean value) { bitField0_ |= 0x00000001; prevBalanceValue_ = value; onChanged(); return this; } public Builder clearPrevBalanceValue() { bitField0_ = (bitField0_ & ~0x00000001); prevBalanceValue_ = false; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:SetBalancerRunningResponse) } static { defaultInstance = new SetBalancerRunningResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:SetBalancerRunningResponse) } public interface CatalogScanRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { } public static final class CatalogScanRequest extends com.google.protobuf.GeneratedMessage implements CatalogScanRequestOrBuilder { // Use CatalogScanRequest.newBuilder() to construct. 
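// Editor's illustrative sketch (not emitted by protoc): draining a stream of
// length-delimited CatalogScanRequest messages. parseDelimitedFrom() returns
// null at end of stream, which is the loop's exit condition. The helper name
// exampleReadAll and the java.util.List usage are assumptions for illustration only.
private static java.util.List<CatalogScanRequest> exampleReadAll(java.io.InputStream in)
    throws java.io.IOException {
  java.util.List<CatalogScanRequest> requests =
      new java.util.ArrayList<CatalogScanRequest>();
  CatalogScanRequest next;
  // Each call consumes one varint length prefix followed by that many
  // message bytes, as written by writeDelimitedTo() on the sending side.
  while ((next = CatalogScanRequest.parseDelimitedFrom(in)) != null) {
    requests.add(next);
  }
  return requests;
}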
private CatalogScanRequest(Builder builder) { super(builder); } private CatalogScanRequest(boolean noInit) {} private static final CatalogScanRequest defaultInstance; public static CatalogScanRequest getDefaultInstance() { return defaultInstance; } public CatalogScanRequest getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanRequest_fieldAccessorTable; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseFrom(java.io.InputStream input) 
throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanRequest_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.getDescriptor(); } public 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } } } } // @@protoc_insertion_point(builder_scope:CatalogScanRequest) } static { defaultInstance = new CatalogScanRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:CatalogScanRequest) } public interface CatalogScanResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { // optional int32 scanResult = 1; boolean hasScanResult(); int getScanResult(); } public static final class CatalogScanResponse extends com.google.protobuf.GeneratedMessage implements CatalogScanResponseOrBuilder { // Use CatalogScanResponse.newBuilder() to construct. 
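  /*
   * Usage sketch (illustrative only, not part of the generated code): the optional
   * int32 scanResult field is set and read through the generated accessors; its
   * meaning is defined by the .proto file and the master implementation, not here.
   *
   *   CatalogScanResponse response = CatalogScanResponse.newBuilder()
   *       .setScanResult(5)              // optional int32 scanResult = 1
   *       .build();
   *   int result = response.hasScanResult() ? response.getScanResult() : 0;
   */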
private CatalogScanResponse(Builder builder) { super(builder); } private CatalogScanResponse(boolean noInit) {} private static final CatalogScanResponse defaultInstance; public static CatalogScanResponse getDefaultInstance() { return defaultInstance; } public CatalogScanResponse getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanResponse_fieldAccessorTable; } private int bitField0_; // optional int32 scanResult = 1; public static final int SCANRESULT_FIELD_NUMBER = 1; private int scanResult_; public boolean hasScanResult() { return ((bitField0_ & 0x00000001) == 0x00000001); } public int getScanResult() { return scanResult_; } private void initFields() { scanResult_ = 0; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeInt32(1, scanResult_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeInt32Size(1, scanResult_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse) obj; boolean result = true; result = result && (hasScanResult() == other.hasScanResult()); if (hasScanResult()) { result = result && (getScanResult() == other.getScanResult()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasScanResult()) { hash = (37 * hash) + SCANRESULT_FIELD_NUMBER; hash = (53 * hash) + getScanResult(); } hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanResponse_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); scanResult_ = 0; bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.getDescriptor(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.scanResult_ = scanResult_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.getDefaultInstance()) return this; if (other.hasScanResult()) { setScanResult(other.getScanResult()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); 
onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } case 8: { bitField0_ |= 0x00000001; scanResult_ = input.readInt32(); break; } } } } private int bitField0_; // optional int32 scanResult = 1; private int scanResult_ ; public boolean hasScanResult() { return ((bitField0_ & 0x00000001) == 0x00000001); } public int getScanResult() { return scanResult_; } public Builder setScanResult(int value) { bitField0_ |= 0x00000001; scanResult_ = value; onChanged(); return this; } public Builder clearScanResult() { bitField0_ = (bitField0_ & ~0x00000001); scanResult_ = 0; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:CatalogScanResponse) } static { defaultInstance = new CatalogScanResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:CatalogScanResponse) } public interface EnableCatalogJanitorRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required bool enable = 1; boolean hasEnable(); boolean getEnable(); } public static final class EnableCatalogJanitorRequest extends com.google.protobuf.GeneratedMessage implements EnableCatalogJanitorRequestOrBuilder { // Use EnableCatalogJanitorRequest.newBuilder() to construct. private EnableCatalogJanitorRequest(Builder builder) { super(builder); } private EnableCatalogJanitorRequest(boolean noInit) {} private static final EnableCatalogJanitorRequest defaultInstance; public static EnableCatalogJanitorRequest getDefaultInstance() { return defaultInstance; } public EnableCatalogJanitorRequest getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableCatalogJanitorRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableCatalogJanitorRequest_fieldAccessorTable; } private int bitField0_; // required bool enable = 1; public static final int ENABLE_FIELD_NUMBER = 1; private boolean enable_; public boolean hasEnable() { return ((bitField0_ & 0x00000001) == 0x00000001); } public boolean getEnable() { return enable_; } private void initFields() { enable_ = false; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasEnable()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, enable_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(1, enable_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public 
boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest) obj; boolean result = true; result = result && (hasEnable() == other.hasEnable()); if (hasEnable()) { result = result && (getEnable() == other.getEnable()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasEnable()) { hash = (37 * hash) + ENABLE_FIELD_NUMBER; hash = (53 * hash) + hashBoolean(getEnable()); } hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return 
newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableCatalogJanitorRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableCatalogJanitorRequest_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); enable_ = false; bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest.getDescriptor(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest buildPartial() { 
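      /*
       * buildPartial copies the builder's current field values into a fresh message and
       * translates the builder's presence bits (bitField0_) into the message's bitField0_;
       * unlike build(), it does not reject a missing required 'enable' field.
       */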
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.enable_ = enable_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest.getDefaultInstance()) return this; if (other.hasEnable()) { setEnable(other.getEnable()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasEnable()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } case 8: { bitField0_ |= 0x00000001; enable_ = input.readBool(); break; } } } } private int bitField0_; // required bool enable = 1; private boolean enable_ ; public boolean hasEnable() { return ((bitField0_ & 0x00000001) == 0x00000001); } public boolean getEnable() { return enable_; } public Builder setEnable(boolean value) { bitField0_ |= 0x00000001; enable_ = value; onChanged(); return this; } public Builder clearEnable() { bitField0_ = (bitField0_ & ~0x00000001); enable_ = false; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:EnableCatalogJanitorRequest) } static { defaultInstance = new EnableCatalogJanitorRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:EnableCatalogJanitorRequest) } public interface EnableCatalogJanitorResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { // optional bool prevValue = 1; boolean hasPrevValue(); boolean getPrevValue(); } public static final class EnableCatalogJanitorResponse extends com.google.protobuf.GeneratedMessage implements EnableCatalogJanitorResponseOrBuilder { // Use EnableCatalogJanitorResponse.newBuilder() to construct. 
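  /*
   * Usage sketch (illustrative only, not part of the generated code): a client sets the
   * required 'enable' flag on EnableCatalogJanitorRequest and, from the response returned
   * by the master RPC (the stub call itself is omitted here), reads the optional previous
   * value.
   *
   *   EnableCatalogJanitorRequest req =
   *       EnableCatalogJanitorRequest.newBuilder().setEnable(true).build();
   *   // ... issue the RPC, obtaining an EnableCatalogJanitorResponse 'resp' ...
   *   boolean wasEnabled = resp.hasPrevValue() && resp.getPrevValue();
   */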
private EnableCatalogJanitorResponse(Builder builder) { super(builder); } private EnableCatalogJanitorResponse(boolean noInit) {} private static final EnableCatalogJanitorResponse defaultInstance; public static EnableCatalogJanitorResponse getDefaultInstance() { return defaultInstance; } public EnableCatalogJanitorResponse getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableCatalogJanitorResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableCatalogJanitorResponse_fieldAccessorTable; } private int bitField0_; // optional bool prevValue = 1; public static final int PREVVALUE_FIELD_NUMBER = 1; private boolean prevValue_; public boolean hasPrevValue() { return ((bitField0_ & 0x00000001) == 0x00000001); } public boolean getPrevValue() { return prevValue_; } private void initFields() { prevValue_ = false; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, prevValue_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(1, prevValue_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse) obj; boolean result = true; result = result && (hasPrevValue() == other.hasPrevValue()); if (hasPrevValue()) { result = result && (getPrevValue() == other.getPrevValue()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasPrevValue()) { hash = (37 * hash) + PREVVALUE_FIELD_NUMBER; hash = (53 * hash) + hashBoolean(getPrevValue()); } hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parseFrom( 
com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableCatalogJanitorResponse_descriptor; } protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_EnableCatalogJanitorResponse_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); prevValue_ = false; bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.getDescriptor(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.prevValue_ = prevValue_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.getDefaultInstance()) return this; if (other.hasPrevValue()) { setPrevValue(other.getPrevValue()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { 
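        /*
         * Tag-dispatch parse loop: tag 0 marks end of input, tag 8 (field number 1,
         * varint wire type) is the bool prevValue, and any other tag is preserved in
         * this builder's UnknownFieldSet.
         */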
com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } case 8: { bitField0_ |= 0x00000001; prevValue_ = input.readBool(); break; } } } } private int bitField0_; // optional bool prevValue = 1; private boolean prevValue_ ; public boolean hasPrevValue() { return ((bitField0_ & 0x00000001) == 0x00000001); } public boolean getPrevValue() { return prevValue_; } public Builder setPrevValue(boolean value) { bitField0_ |= 0x00000001; prevValue_ = value; onChanged(); return this; } public Builder clearPrevValue() { bitField0_ = (bitField0_ & ~0x00000001); prevValue_ = false; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:EnableCatalogJanitorResponse) } static { defaultInstance = new EnableCatalogJanitorResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:EnableCatalogJanitorResponse) } public interface IsCatalogJanitorEnabledRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { } public static final class IsCatalogJanitorEnabledRequest extends com.google.protobuf.GeneratedMessage implements IsCatalogJanitorEnabledRequestOrBuilder { // Use IsCatalogJanitorEnabledRequest.newBuilder() to construct. private IsCatalogJanitorEnabledRequest(Builder builder) { super(builder); } private IsCatalogJanitorEnabledRequest(boolean noInit) {} private static final IsCatalogJanitorEnabledRequest defaultInstance; public static IsCatalogJanitorEnabledRequest getDefaultInstance() { return defaultInstance; } public IsCatalogJanitorEnabledRequest getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsCatalogJanitorEnabledRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsCatalogJanitorEnabledRequest_fieldAccessorTable; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest)) { return super.equals(obj); } 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest) obj; boolean result = true; result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public 
Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsCatalogJanitorEnabledRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsCatalogJanitorEnabledRequest_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest.getDescriptor(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest)other); } else { 
super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } } } } // @@protoc_insertion_point(builder_scope:IsCatalogJanitorEnabledRequest) } static { defaultInstance = new IsCatalogJanitorEnabledRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:IsCatalogJanitorEnabledRequest) } public interface IsCatalogJanitorEnabledResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { // required bool value = 1; boolean hasValue(); boolean getValue(); } public static final class IsCatalogJanitorEnabledResponse extends com.google.protobuf.GeneratedMessage implements IsCatalogJanitorEnabledResponseOrBuilder { // Use IsCatalogJanitorEnabledResponse.newBuilder() to construct. private IsCatalogJanitorEnabledResponse(Builder builder) { super(builder); } private IsCatalogJanitorEnabledResponse(boolean noInit) {} private static final IsCatalogJanitorEnabledResponse defaultInstance; public static IsCatalogJanitorEnabledResponse getDefaultInstance() { return defaultInstance; } public IsCatalogJanitorEnabledResponse getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsCatalogJanitorEnabledResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsCatalogJanitorEnabledResponse_fieldAccessorTable; } private int bitField0_; // required bool value = 1; public static final int VALUE_FIELD_NUMBER = 1; private boolean value_; public boolean hasValue() { return ((bitField0_ & 0x00000001) == 0x00000001); } public boolean getValue() { return value_; } private void initFields() { value_ = false; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasValue()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, value_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += 
com.google.protobuf.CodedOutputStream .computeBoolSize(1, value_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse) obj; boolean result = true; result = result && (hasValue() == other.hasValue()); if (hasValue()) { result = result && (getValue() == other.getValue()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasValue()) { hash = (37 * hash) + VALUE_FIELD_NUMBER; hash = (53 * hash) + hashBoolean(getValue()); } hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsCatalogJanitorEnabledResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_IsCatalogJanitorEnabledResponse_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); value_ = false; bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.getDescriptor(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse build() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.value_ = value_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance()) return this; if (other.hasValue()) { setValue(other.getValue()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasValue()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } case 8: { bitField0_ |= 0x00000001; value_ = input.readBool(); break; } } } } private int bitField0_; // required bool value = 1; private boolean value_ ; public boolean hasValue() { return ((bitField0_ & 0x00000001) == 0x00000001); } public boolean getValue() { return value_; } public Builder setValue(boolean value) { bitField0_ |= 0x00000001; value_ = value; onChanged(); return this; } public Builder clearValue() { bitField0_ = (bitField0_ & ~0x00000001); value_ = false; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:IsCatalogJanitorEnabledResponse) } static { defaultInstance = new IsCatalogJanitorEnabledResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:IsCatalogJanitorEnabledResponse) } public static abstract class MasterAdminService implements com.google.protobuf.Service { protected MasterAdminService() {} public interface Interface { public abstract void addColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse> done); public abstract void deleteColumn( com.google.protobuf.RpcController controller, 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse> done); public abstract void modifyColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse> done); public abstract void moveRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse> done); public abstract void assignRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse> done); public abstract void unassignRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse> done); public abstract void offlineRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse> done); public abstract void deleteTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse> done); public abstract void enableTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse> done); public abstract void disableTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse> done); public abstract void modifyTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse> done); public abstract void createTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse> done); public abstract void shutdown( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse> done); public abstract void stopMaster( com.google.protobuf.RpcController controller, 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse> done); public abstract void balance( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse> done); public abstract void setBalancerRunning( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse> done); public abstract void runCatalogScan( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse> done); public abstract void enableCatalogJanitor( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse> done); public abstract void isCatalogJanitorEnabled( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse> done); public abstract void execMasterService( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done); } public static com.google.protobuf.Service newReflectiveService( final Interface impl) { return new MasterAdminService() { @java.lang.Override public void addColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse> done) { impl.addColumn(controller, request, done); } @java.lang.Override public void deleteColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse> done) { impl.deleteColumn(controller, request, done); } @java.lang.Override public void modifyColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse> done) { impl.modifyColumn(controller, request, done); } @java.lang.Override public void moveRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse> done) { impl.moveRegion(controller, request, done); } 
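/* Editor's note: a hedged usage sketch, not part of the generated code.
 * The anonymous MasterAdminService above only forwards each call to the
 * supplied Interface impl; a client normally drives the service from the
 * other side, through the blocking stub declared later in this class.
 * Assuming a com.google.protobuf.BlockingRpcChannel `channel` and an
 * RpcController `controller` provided by the caller's RPC layer (neither
 * is defined in this file), checking the catalog-janitor flag could look
 * like:
 *
 *   MasterAdminProtos.MasterAdminService.BlockingInterface admin =
 *       MasterAdminProtos.MasterAdminService.newBlockingStub(channel);
 *   boolean enabled = admin.isCatalogJanitorEnabled(controller,
 *       MasterAdminProtos.IsCatalogJanitorEnabledRequest.getDefaultInstance())
 *       .getValue();
 *
 * The generated code is transport-agnostic, so `channel` and `controller`
 * are placeholders for whatever RPC implementation the caller wires in.
 */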
@java.lang.Override public void assignRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse> done) { impl.assignRegion(controller, request, done); } @java.lang.Override public void unassignRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse> done) { impl.unassignRegion(controller, request, done); } @java.lang.Override public void offlineRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse> done) { impl.offlineRegion(controller, request, done); } @java.lang.Override public void deleteTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse> done) { impl.deleteTable(controller, request, done); } @java.lang.Override public void enableTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse> done) { impl.enableTable(controller, request, done); } @java.lang.Override public void disableTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse> done) { impl.disableTable(controller, request, done); } @java.lang.Override public void modifyTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse> done) { impl.modifyTable(controller, request, done); } @java.lang.Override public void createTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse> done) { impl.createTable(controller, request, done); } @java.lang.Override public void shutdown( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse> done) { impl.shutdown(controller, request, done); } @java.lang.Override public void stopMaster( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse> done) { impl.stopMaster(controller, request, done); } @java.lang.Override public void balance( 
com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse> done) { impl.balance(controller, request, done); } @java.lang.Override public void setBalancerRunning( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse> done) { impl.setBalancerRunning(controller, request, done); } @java.lang.Override public void runCatalogScan( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse> done) { impl.runCatalogScan(controller, request, done); } @java.lang.Override public void enableCatalogJanitor( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse> done) { impl.enableCatalogJanitor(controller, request, done); } @java.lang.Override public void isCatalogJanitorEnabled( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse> done) { impl.isCatalogJanitorEnabled(controller, request, done); } @java.lang.Override public void execMasterService( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done) { impl.execMasterService(controller, request, done); } }; } public static com.google.protobuf.BlockingService newReflectiveBlockingService(final BlockingInterface impl) { return new com.google.protobuf.BlockingService() { public final com.google.protobuf.Descriptors.ServiceDescriptor getDescriptorForType() { return getDescriptor(); } public final com.google.protobuf.Message callBlockingMethod( com.google.protobuf.Descriptors.MethodDescriptor method, com.google.protobuf.RpcController controller, com.google.protobuf.Message request) throws com.google.protobuf.ServiceException { if (method.getService() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.callBlockingMethod() given method descriptor for " + "wrong service type."); } switch(method.getIndex()) { case 0: return impl.addColumn(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest)request); case 1: return impl.deleteColumn(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest)request); case 2: return impl.modifyColumn(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest)request); case 3: return impl.moveRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest)request); case 4: return impl.assignRegion(controller, 
(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest)request); case 5: return impl.unassignRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest)request); case 6: return impl.offlineRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest)request); case 7: return impl.deleteTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest)request); case 8: return impl.enableTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest)request); case 9: return impl.disableTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest)request); case 10: return impl.modifyTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest)request); case 11: return impl.createTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest)request); case 12: return impl.shutdown(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest)request); case 13: return impl.stopMaster(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest)request); case 14: return impl.balance(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest)request); case 15: return impl.setBalancerRunning(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest)request); case 16: return impl.runCatalogScan(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest)request); case 17: return impl.enableCatalogJanitor(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest)request); case 18: return impl.isCatalogJanitorEnabled(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest)request); case 19: return impl.execMasterService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request); default: throw new java.lang.AssertionError("Can't get here."); } } public final com.google.protobuf.Message getRequestPrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { if (method.getService() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.getRequestPrototype() given method " + "descriptor for wrong service type."); } switch(method.getIndex()) { case 0: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest.getDefaultInstance(); case 1: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest.getDefaultInstance(); case 2: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest.getDefaultInstance(); case 3: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest.getDefaultInstance(); case 4: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest.getDefaultInstance(); case 5: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest.getDefaultInstance(); case 6: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest.getDefaultInstance(); case 7: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest.getDefaultInstance(); case 8: return 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest.getDefaultInstance(); case 9: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest.getDefaultInstance(); case 10: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest.getDefaultInstance(); case 11: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest.getDefaultInstance(); case 12: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest.getDefaultInstance(); case 13: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest.getDefaultInstance(); case 14: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest.getDefaultInstance(); case 15: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest.getDefaultInstance(); case 16: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.getDefaultInstance(); case 17: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest.getDefaultInstance(); case 18: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest.getDefaultInstance(); case 19: return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } } public final com.google.protobuf.Message getResponsePrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { if (method.getService() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.getResponsePrototype() given method " + "descriptor for wrong service type."); } switch(method.getIndex()) { case 0: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.getDefaultInstance(); case 1: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.getDefaultInstance(); case 2: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.getDefaultInstance(); case 3: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.getDefaultInstance(); case 4: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.getDefaultInstance(); case 5: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.getDefaultInstance(); case 6: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.getDefaultInstance(); case 7: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.getDefaultInstance(); case 8: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.getDefaultInstance(); case 9: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.getDefaultInstance(); case 10: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.getDefaultInstance(); case 11: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.getDefaultInstance(); case 12: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.getDefaultInstance(); case 13: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.getDefaultInstance(); case 14: return 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.getDefaultInstance(); case 15: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.getDefaultInstance(); case 16: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.getDefaultInstance(); case 17: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.getDefaultInstance(); case 18: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance(); case 19: return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } } }; } public abstract void addColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse> done); public abstract void deleteColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse> done); public abstract void modifyColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse> done); public abstract void moveRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse> done); public abstract void assignRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse> done); public abstract void unassignRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse> done); public abstract void offlineRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse> done); public abstract void deleteTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse> done); public abstract void enableTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse> done); public abstract void disableTable( com.google.protobuf.RpcController controller, 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse> done); public abstract void modifyTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse> done); public abstract void createTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse> done); public abstract void shutdown( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse> done); public abstract void stopMaster( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse> done); public abstract void balance( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse> done); public abstract void setBalancerRunning( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse> done); public abstract void runCatalogScan( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse> done); public abstract void enableCatalogJanitor( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse> done); public abstract void isCatalogJanitorEnabled( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse> done); public abstract void execMasterService( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done); public static final com.google.protobuf.Descriptors.ServiceDescriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.getDescriptor().getServices().get(0); } public final com.google.protobuf.Descriptors.ServiceDescriptor getDescriptorForType() { return getDescriptor(); } public final void callMethod( 
com.google.protobuf.Descriptors.MethodDescriptor method, com.google.protobuf.RpcController controller, com.google.protobuf.Message request, com.google.protobuf.RpcCallback< com.google.protobuf.Message> done) { if (method.getService() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.callMethod() given method descriptor for wrong " + "service type."); } switch(method.getIndex()) { case 0: this.addColumn(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse>specializeCallback( done)); return; case 1: this.deleteColumn(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse>specializeCallback( done)); return; case 2: this.modifyColumn(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse>specializeCallback( done)); return; case 3: this.moveRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse>specializeCallback( done)); return; case 4: this.assignRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse>specializeCallback( done)); return; case 5: this.unassignRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse>specializeCallback( done)); return; case 6: this.offlineRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse>specializeCallback( done)); return; case 7: this.deleteTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse>specializeCallback( done)); return; case 8: this.enableTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse>specializeCallback( done)); return; case 9: this.disableTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse>specializeCallback( done)); return; case 10: this.modifyTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse>specializeCallback( done)); return; case 11: this.createTable(controller, 
(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse>specializeCallback( done)); return; case 12: this.shutdown(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse>specializeCallback( done)); return; case 13: this.stopMaster(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse>specializeCallback( done)); return; case 14: this.balance(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse>specializeCallback( done)); return; case 15: this.setBalancerRunning(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse>specializeCallback( done)); return; case 16: this.runCatalogScan(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse>specializeCallback( done)); return; case 17: this.enableCatalogJanitor(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse>specializeCallback( done)); return; case 18: this.isCatalogJanitorEnabled(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse>specializeCallback( done)); return; case 19: this.execMasterService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse>specializeCallback( done)); return; default: throw new java.lang.AssertionError("Can't get here."); } } public final com.google.protobuf.Message getRequestPrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { if (method.getService() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.getRequestPrototype() given method " + "descriptor for wrong service type."); } switch(method.getIndex()) { case 0: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest.getDefaultInstance(); case 1: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest.getDefaultInstance(); case 2: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest.getDefaultInstance(); case 3: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest.getDefaultInstance(); case 4: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest.getDefaultInstance(); case 5: return 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest.getDefaultInstance(); case 6: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest.getDefaultInstance(); case 7: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest.getDefaultInstance(); case 8: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest.getDefaultInstance(); case 9: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest.getDefaultInstance(); case 10: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest.getDefaultInstance(); case 11: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest.getDefaultInstance(); case 12: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest.getDefaultInstance(); case 13: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest.getDefaultInstance(); case 14: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest.getDefaultInstance(); case 15: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest.getDefaultInstance(); case 16: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.getDefaultInstance(); case 17: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest.getDefaultInstance(); case 18: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest.getDefaultInstance(); case 19: return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } } public final com.google.protobuf.Message getResponsePrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { if (method.getService() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.getResponsePrototype() given method " + "descriptor for wrong service type."); } switch(method.getIndex()) { case 0: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.getDefaultInstance(); case 1: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.getDefaultInstance(); case 2: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.getDefaultInstance(); case 3: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.getDefaultInstance(); case 4: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.getDefaultInstance(); case 5: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.getDefaultInstance(); case 6: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.getDefaultInstance(); case 7: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.getDefaultInstance(); case 8: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.getDefaultInstance(); case 9: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.getDefaultInstance(); case 10: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.getDefaultInstance(); case 11: return 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.getDefaultInstance(); case 12: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.getDefaultInstance(); case 13: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.getDefaultInstance(); case 14: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.getDefaultInstance(); case 15: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.getDefaultInstance(); case 16: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.getDefaultInstance(); case 17: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.getDefaultInstance(); case 18: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance(); case 19: return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } } public static Stub newStub( com.google.protobuf.RpcChannel channel) { return new Stub(channel); } public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MasterAdminService implements Interface { private Stub(com.google.protobuf.RpcChannel channel) { this.channel = channel; } private final com.google.protobuf.RpcChannel channel; public com.google.protobuf.RpcChannel getChannel() { return channel; } public void addColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse> done) { channel.callMethod( getDescriptor().getMethods().get(0), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.getDefaultInstance())); } public void deleteColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse> done) { channel.callMethod( getDescriptor().getMethods().get(1), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.getDefaultInstance())); } public void modifyColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse> done) { channel.callMethod( getDescriptor().getMethods().get(2), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.getDefaultInstance())); } public void moveRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse> done) { channel.callMethod( getDescriptor().getMethods().get(3), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.getDefaultInstance())); } public void assignRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse> done) { channel.callMethod( getDescriptor().getMethods().get(4), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.getDefaultInstance())); } public void unassignRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse> done) { channel.callMethod( getDescriptor().getMethods().get(5), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.getDefaultInstance())); } public void offlineRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse> done) { channel.callMethod( getDescriptor().getMethods().get(6), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.getDefaultInstance())); } public void deleteTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse> done) { channel.callMethod( getDescriptor().getMethods().get(7), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.getDefaultInstance(), 
com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.getDefaultInstance())); } public void enableTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse> done) { channel.callMethod( getDescriptor().getMethods().get(8), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.getDefaultInstance())); } public void disableTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse> done) { channel.callMethod( getDescriptor().getMethods().get(9), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.getDefaultInstance())); } public void modifyTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse> done) { channel.callMethod( getDescriptor().getMethods().get(10), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.getDefaultInstance())); } public void createTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse> done) { channel.callMethod( getDescriptor().getMethods().get(11), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.getDefaultInstance())); } public void shutdown( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse> done) { channel.callMethod( getDescriptor().getMethods().get(12), controller, request, 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.getDefaultInstance())); } public void stopMaster( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse> done) { channel.callMethod( getDescriptor().getMethods().get(13), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.getDefaultInstance())); } public void balance( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse> done) { channel.callMethod( getDescriptor().getMethods().get(14), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.getDefaultInstance())); } public void setBalancerRunning( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse> done) { channel.callMethod( getDescriptor().getMethods().get(15), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.getDefaultInstance())); } public void runCatalogScan( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse> done) { channel.callMethod( getDescriptor().getMethods().get(16), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.getDefaultInstance())); } public void enableCatalogJanitor( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse> done) { channel.callMethod( 
getDescriptor().getMethods().get(17), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.getDefaultInstance())); } public void isCatalogJanitorEnabled( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse> done) { channel.callMethod( getDescriptor().getMethods().get(18), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance())); } public void execMasterService( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done) { channel.callMethod( getDescriptor().getMethods().get(19), controller, request, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance())); } } public static BlockingInterface newBlockingStub( com.google.protobuf.BlockingRpcChannel channel) { return new BlockingStub(channel); } public interface BlockingInterface { public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse addColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse deleteColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse modifyColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse moveRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse assignRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest request) throws com.google.protobuf.ServiceException; public 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse unassignRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse offlineRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse deleteTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse enableTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse disableTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse modifyTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse createTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse shutdown( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse stopMaster( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse balance( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse setBalancerRunning( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse runCatalogScan( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse enableCatalogJanitor( com.google.protobuf.RpcController controller, 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse isCatalogJanitorEnabled( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execMasterService( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request) throws com.google.protobuf.ServiceException; } private static final class BlockingStub implements BlockingInterface { private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { this.channel = channel; } private final com.google.protobuf.BlockingRpcChannel channel; public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse addColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(0), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse deleteColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(1), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse modifyColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(2), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse moveRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(3), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse assignRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest request) throws com.google.protobuf.ServiceException { return 
(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(4), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse unassignRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(5), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse offlineRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(6), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse deleteTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(7), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse enableTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(8), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse disableTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(9), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse modifyTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse) channel.callBlockingMethod( 
getDescriptor().getMethods().get(10), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse createTable( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(11), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse shutdown( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(12), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse stopMaster( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(13), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse balance( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(14), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse setBalancerRunning( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(15), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse runCatalogScan( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(16), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.getDefaultInstance()); } 
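// Dispatch pattern shared by every BlockingStub method above and below: the call is
// forwarded to channel.callBlockingMethod with the service method descriptor looked up
// by its index (which matches the declaration order of the rpcs in MasterAdmin.proto),
// the request, and the response prototype obtained via getDefaultInstance(); the returned
// com.google.protobuf.Message is then cast to the concrete response type, and transport
// or application failures surface as com.google.protobuf.ServiceException.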
public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse enableCatalogJanitor( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(17), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse isCatalogJanitorEnabled( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(18), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execMasterService( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(19), controller, request, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance()); } } } private static com.google.protobuf.Descriptors.Descriptor internal_static_AddColumnRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_AddColumnRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_AddColumnResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_AddColumnResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_DeleteColumnRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_DeleteColumnRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_DeleteColumnResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_DeleteColumnResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_ModifyColumnRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_ModifyColumnRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_ModifyColumnResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_ModifyColumnResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_MoveRegionRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_MoveRegionRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_MoveRegionResponse_descriptor; private 
static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_MoveRegionResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_AssignRegionRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_AssignRegionRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_AssignRegionResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_AssignRegionResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_UnassignRegionRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_UnassignRegionRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_UnassignRegionResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_UnassignRegionResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_OfflineRegionRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_OfflineRegionRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_OfflineRegionResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_OfflineRegionResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_CreateTableRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_CreateTableRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_CreateTableResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_CreateTableResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_DeleteTableRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_DeleteTableRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_DeleteTableResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_DeleteTableResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_EnableTableRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_EnableTableRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_EnableTableResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_EnableTableResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_DisableTableRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_DisableTableRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_DisableTableResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_DisableTableResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_ModifyTableRequest_descriptor; private static 
com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_ModifyTableRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_ModifyTableResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_ModifyTableResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_ShutdownRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_ShutdownRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_ShutdownResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_ShutdownResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_StopMasterRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_StopMasterRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_StopMasterResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_StopMasterResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_BalanceRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_BalanceRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_BalanceResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_BalanceResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_SetBalancerRunningRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_SetBalancerRunningRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_SetBalancerRunningResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_SetBalancerRunningResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_CatalogScanRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_CatalogScanRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_CatalogScanResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_CatalogScanResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_EnableCatalogJanitorRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_EnableCatalogJanitorRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_EnableCatalogJanitorResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_EnableCatalogJanitorResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_IsCatalogJanitorEnabledRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_IsCatalogJanitorEnabledRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_IsCatalogJanitorEnabledResponse_descriptor; private static 
com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_IsCatalogJanitorEnabledResponse_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n\021MasterAdmin.proto\032\013hbase.proto\032\014Client" + ".proto\"R\n\020AddColumnRequest\022\021\n\ttableName\030" + "\001 \002(\014\022+\n\016columnFamilies\030\002 \002(\0132\023.ColumnFa" + "milySchema\"\023\n\021AddColumnResponse\"<\n\023Delet" + "eColumnRequest\022\021\n\ttableName\030\001 \002(\014\022\022\n\ncol" + "umnName\030\002 \002(\014\"\026\n\024DeleteColumnResponse\"U\n" + "\023ModifyColumnRequest\022\021\n\ttableName\030\001 \002(\014\022" + "+\n\016columnFamilies\030\002 \002(\0132\023.ColumnFamilySc" + "hema\"\026\n\024ModifyColumnResponse\"Z\n\021MoveRegi" + "onRequest\022 \n\006region\030\001 \002(\0132\020.RegionSpecif", "ier\022#\n\016destServerName\030\002 \001(\0132\013.ServerName" + "\"\024\n\022MoveRegionResponse\"7\n\023AssignRegionRe" + "quest\022 \n\006region\030\001 \002(\0132\020.RegionSpecifier\"" + "\026\n\024AssignRegionResponse\"O\n\025UnassignRegio" + "nRequest\022 \n\006region\030\001 \002(\0132\020.RegionSpecifi" + "er\022\024\n\005force\030\002 \001(\010:\005false\"\030\n\026UnassignRegi" + "onResponse\"8\n\024OfflineRegionRequest\022 \n\006re" + "gion\030\001 \002(\0132\020.RegionSpecifier\"\027\n\025OfflineR" + "egionResponse\"J\n\022CreateTableRequest\022!\n\013t" + "ableSchema\030\001 \002(\0132\014.TableSchema\022\021\n\tsplitK", "eys\030\002 \003(\014\"\025\n\023CreateTableResponse\"\'\n\022Dele" + "teTableRequest\022\021\n\ttableName\030\001 \002(\014\"\025\n\023Del" + "eteTableResponse\"\'\n\022EnableTableRequest\022\021" + "\n\ttableName\030\001 \002(\014\"\025\n\023EnableTableResponse" + "\"(\n\023DisableTableRequest\022\021\n\ttableName\030\001 \002" + "(\014\"\026\n\024DisableTableResponse\"J\n\022ModifyTabl" + "eRequest\022\021\n\ttableName\030\001 \002(\014\022!\n\013tableSche" + "ma\030\002 \002(\0132\014.TableSchema\"\025\n\023ModifyTableRes" + "ponse\"\021\n\017ShutdownRequest\"\022\n\020ShutdownResp" + "onse\"\023\n\021StopMasterRequest\"\024\n\022StopMasterR", "esponse\"\020\n\016BalanceRequest\"&\n\017BalanceResp" + "onse\022\023\n\013balancerRan\030\001 \002(\010\"<\n\031SetBalancer" + "RunningRequest\022\n\n\002on\030\001 \002(\010\022\023\n\013synchronou" + "s\030\002 \001(\010\"6\n\032SetBalancerRunningResponse\022\030\n" + "\020prevBalanceValue\030\001 \001(\010\"\024\n\022CatalogScanRe" + "quest\")\n\023CatalogScanResponse\022\022\n\nscanResu" + "lt\030\001 \001(\005\"-\n\033EnableCatalogJanitorRequest\022" + "\016\n\006enable\030\001 \002(\010\"1\n\034EnableCatalogJanitorR" + "esponse\022\021\n\tprevValue\030\001 \001(\010\" \n\036IsCatalogJ" + "anitorEnabledRequest\"0\n\037IsCatalogJanitor", "EnabledResponse\022\r\n\005value\030\001 \002(\0102\201\n\n\022Maste" + "rAdminService\0222\n\taddColumn\022\021.AddColumnRe" + "quest\032\022.AddColumnResponse\022;\n\014deleteColum" + "n\022\024.DeleteColumnRequest\032\025.DeleteColumnRe" + "sponse\022;\n\014modifyColumn\022\024.ModifyColumnReq" + "uest\032\025.ModifyColumnResponse\0225\n\nmoveRegio" + "n\022\022.MoveRegionRequest\032\023.MoveRegionRespon" + "se\022;\n\014assignRegion\022\024.AssignRegionRequest" + "\032\025.AssignRegionResponse\022A\n\016unassignRegio" + "n\022\026.UnassignRegionRequest\032\027.UnassignRegi", 
"onResponse\022>\n\rofflineRegion\022\025.OfflineReg" + "ionRequest\032\026.OfflineRegionResponse\0228\n\013de" + "leteTable\022\023.DeleteTableRequest\032\024.DeleteT" + "ableResponse\0228\n\013enableTable\022\023.EnableTabl" + "eRequest\032\024.EnableTableResponse\022;\n\014disabl" + "eTable\022\024.DisableTableRequest\032\025.DisableTa" + "bleResponse\0228\n\013modifyTable\022\023.ModifyTable" + "Request\032\024.ModifyTableResponse\0228\n\013createT" + "able\022\023.CreateTableRequest\032\024.CreateTableR" + "esponse\022/\n\010shutdown\022\020.ShutdownRequest\032\021.", "ShutdownResponse\0225\n\nstopMaster\022\022.StopMas" + "terRequest\032\023.StopMasterResponse\022,\n\007balan" + "ce\022\017.BalanceRequest\032\020.BalanceResponse\022M\n" + "\022setBalancerRunning\022\032.SetBalancerRunning" + "Request\032\033.SetBalancerRunningResponse\022;\n\016" + "runCatalogScan\022\023.CatalogScanRequest\032\024.Ca" + "talogScanResponse\022S\n\024enableCatalogJanito" + "r\022\034.EnableCatalogJanitorRequest\032\035.Enable" + "CatalogJanitorResponse\022\\\n\027isCatalogJanit" + "orEnabled\022\037.IsCatalogJanitorEnabledReque", "st\032 .IsCatalogJanitorEnabledResponse\022L\n\021" + "execMasterService\022\032.CoprocessorServiceRe" + "quest\032\033.CoprocessorServiceResponseBG\n*or" + "g.apache.hadoop.hbase.protobuf.generated" + "B\021MasterAdminProtosH\001\210\001\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { public com.google.protobuf.ExtensionRegistry assignDescriptors( com.google.protobuf.Descriptors.FileDescriptor root) { descriptor = root; internal_static_AddColumnRequest_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_AddColumnRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_AddColumnRequest_descriptor, new java.lang.String[] { "TableName", "ColumnFamilies", }, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest.Builder.class); internal_static_AddColumnResponse_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_AddColumnResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_AddColumnResponse_descriptor, new java.lang.String[] { }, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnResponse.Builder.class); internal_static_DeleteColumnRequest_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_DeleteColumnRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_DeleteColumnRequest_descriptor, new java.lang.String[] { "TableName", "ColumnName", }, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest.Builder.class); internal_static_DeleteColumnResponse_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_DeleteColumnResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_DeleteColumnResponse_descriptor, new java.lang.String[] { }, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.class, 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnResponse.Builder.class); internal_static_ModifyColumnRequest_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_ModifyColumnRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ModifyColumnRequest_descriptor, new java.lang.String[] { "TableName", "ColumnFamilies", }, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest.Builder.class); internal_static_ModifyColumnResponse_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_ModifyColumnResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ModifyColumnResponse_descriptor, new java.lang.String[] { }, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse.Builder.class); internal_static_MoveRegionRequest_descriptor = getDescriptor().getMessageTypes().get(6); internal_static_MoveRegionRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_MoveRegionRequest_descriptor, new java.lang.String[] { "Region", "DestServerName", }, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest.Builder.class); internal_static_MoveRegionResponse_descriptor = getDescriptor().getMessageTypes().get(7); internal_static_MoveRegionResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_MoveRegionResponse_descriptor, new java.lang.String[] { }, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionResponse.Builder.class); internal_static_AssignRegionRequest_descriptor = getDescriptor().getMessageTypes().get(8); internal_static_AssignRegionRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_AssignRegionRequest_descriptor, new java.lang.String[] { "Region", }, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest.Builder.class); internal_static_AssignRegionResponse_descriptor = getDescriptor().getMessageTypes().get(9); internal_static_AssignRegionResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_AssignRegionResponse_descriptor, new java.lang.String[] { }, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse.Builder.class); internal_static_UnassignRegionRequest_descriptor = getDescriptor().getMessageTypes().get(10); internal_static_UnassignRegionRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_UnassignRegionRequest_descriptor, new java.lang.String[] { "Region", "Force", }, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest.Builder.class); internal_static_UnassignRegionResponse_descriptor = 
getDescriptor().getMessageTypes().get(11); internal_static_UnassignRegionResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_UnassignRegionResponse_descriptor, new java.lang.String[] { }, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse.Builder.class); internal_static_OfflineRegionRequest_descriptor = getDescriptor().getMessageTypes().get(12); internal_static_OfflineRegionRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_OfflineRegionRequest_descriptor, new java.lang.String[] { "Region", }, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionRequest.Builder.class); internal_static_OfflineRegionResponse_descriptor = getDescriptor().getMessageTypes().get(13); internal_static_OfflineRegionResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_OfflineRegionResponse_descriptor, new java.lang.String[] { }, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.OfflineRegionResponse.Builder.class); internal_static_CreateTableRequest_descriptor = getDescriptor().getMessageTypes().get(14); internal_static_CreateTableRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_CreateTableRequest_descriptor, new java.lang.String[] { "TableSchema", "SplitKeys", }, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest.Builder.class); internal_static_CreateTableResponse_descriptor = getDescriptor().getMessageTypes().get(15); internal_static_CreateTableResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_CreateTableResponse_descriptor, new java.lang.String[] { }, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse.Builder.class); internal_static_DeleteTableRequest_descriptor = getDescriptor().getMessageTypes().get(16); internal_static_DeleteTableRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_DeleteTableRequest_descriptor, new java.lang.String[] { "TableName", }, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest.Builder.class); internal_static_DeleteTableResponse_descriptor = getDescriptor().getMessageTypes().get(17); internal_static_DeleteTableResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_DeleteTableResponse_descriptor, new java.lang.String[] { }, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableResponse.Builder.class); internal_static_EnableTableRequest_descriptor = getDescriptor().getMessageTypes().get(18); internal_static_EnableTableRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( 
internal_static_EnableTableRequest_descriptor, new java.lang.String[] { "TableName", }, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableRequest.Builder.class); internal_static_EnableTableResponse_descriptor = getDescriptor().getMessageTypes().get(19); internal_static_EnableTableResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_EnableTableResponse_descriptor, new java.lang.String[] { }, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableTableResponse.Builder.class); internal_static_DisableTableRequest_descriptor = getDescriptor().getMessageTypes().get(20); internal_static_DisableTableRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_DisableTableRequest_descriptor, new java.lang.String[] { "TableName", }, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableRequest.Builder.class); internal_static_DisableTableResponse_descriptor = getDescriptor().getMessageTypes().get(21); internal_static_DisableTableResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_DisableTableResponse_descriptor, new java.lang.String[] { }, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DisableTableResponse.Builder.class); internal_static_ModifyTableRequest_descriptor = getDescriptor().getMessageTypes().get(22); internal_static_ModifyTableRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ModifyTableRequest_descriptor, new java.lang.String[] { "TableName", "TableSchema", }, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest.Builder.class); internal_static_ModifyTableResponse_descriptor = getDescriptor().getMessageTypes().get(23); internal_static_ModifyTableResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ModifyTableResponse_descriptor, new java.lang.String[] { }, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableResponse.Builder.class); internal_static_ShutdownRequest_descriptor = getDescriptor().getMessageTypes().get(24); internal_static_ShutdownRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ShutdownRequest_descriptor, new java.lang.String[] { }, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest.Builder.class); internal_static_ShutdownResponse_descriptor = getDescriptor().getMessageTypes().get(25); internal_static_ShutdownResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ShutdownResponse_descriptor, new java.lang.String[] { }, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.class, 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse.Builder.class); internal_static_StopMasterRequest_descriptor = getDescriptor().getMessageTypes().get(26); internal_static_StopMasterRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_StopMasterRequest_descriptor, new java.lang.String[] { }, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest.Builder.class); internal_static_StopMasterResponse_descriptor = getDescriptor().getMessageTypes().get(27); internal_static_StopMasterResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_StopMasterResponse_descriptor, new java.lang.String[] { }, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse.Builder.class); internal_static_BalanceRequest_descriptor = getDescriptor().getMessageTypes().get(28); internal_static_BalanceRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_BalanceRequest_descriptor, new java.lang.String[] { }, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest.Builder.class); internal_static_BalanceResponse_descriptor = getDescriptor().getMessageTypes().get(29); internal_static_BalanceResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_BalanceResponse_descriptor, new java.lang.String[] { "BalancerRan", }, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse.Builder.class); internal_static_SetBalancerRunningRequest_descriptor = getDescriptor().getMessageTypes().get(30); internal_static_SetBalancerRunningRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_SetBalancerRunningRequest_descriptor, new java.lang.String[] { "On", "Synchronous", }, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest.Builder.class); internal_static_SetBalancerRunningResponse_descriptor = getDescriptor().getMessageTypes().get(31); internal_static_SetBalancerRunningResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_SetBalancerRunningResponse_descriptor, new java.lang.String[] { "PrevBalanceValue", }, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.Builder.class); internal_static_CatalogScanRequest_descriptor = getDescriptor().getMessageTypes().get(32); internal_static_CatalogScanRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_CatalogScanRequest_descriptor, new java.lang.String[] { }, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.Builder.class); internal_static_CatalogScanResponse_descriptor = getDescriptor().getMessageTypes().get(33); 
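// Descriptor wiring pattern used throughout assignDescriptors: each generated message is
// bound to its Descriptors.Descriptor by positional index into getMessageTypes() (the order
// the messages are declared in MasterAdmin.proto), and a FieldAccessorTable is built from
// the camel-cased field names so the reflective accessors resolve to the generated getters,
// setters and has-methods of the message class and its Builder.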
internal_static_CatalogScanResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_CatalogScanResponse_descriptor, new java.lang.String[] { "ScanResult", }, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.Builder.class); internal_static_EnableCatalogJanitorRequest_descriptor = getDescriptor().getMessageTypes().get(34); internal_static_EnableCatalogJanitorRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_EnableCatalogJanitorRequest_descriptor, new java.lang.String[] { "Enable", }, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest.Builder.class); internal_static_EnableCatalogJanitorResponse_descriptor = getDescriptor().getMessageTypes().get(35); internal_static_EnableCatalogJanitorResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_EnableCatalogJanitorResponse_descriptor, new java.lang.String[] { "PrevValue", }, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.Builder.class); internal_static_IsCatalogJanitorEnabledRequest_descriptor = getDescriptor().getMessageTypes().get(36); internal_static_IsCatalogJanitorEnabledRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_IsCatalogJanitorEnabledRequest_descriptor, new java.lang.String[] { }, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledRequest.Builder.class); internal_static_IsCatalogJanitorEnabledResponse_descriptor = getDescriptor().getMessageTypes().get(37); internal_static_IsCatalogJanitorEnabledResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_IsCatalogJanitorEnabledResponse_descriptor, new java.lang.String[] { "Value", }, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsCatalogJanitorEnabledResponse.Builder.class); return null; } }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(), org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(), }, assigner); } // @@protoc_insertion_point(outer_class_scope) }
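// Illustrative usage sketch (not part of the compiler output): assuming a
// com.google.protobuf.BlockingRpcChannel already wired to an HBase master and an
// RpcController supplied by the surrounding RPC engine (both are obtained outside this
// file and are placeholders here), the blocking stub defined above could be driven
// roughly as follows:
//
//   MasterAdminProtos.MasterAdminService.BlockingInterface stub =
//       MasterAdminProtos.MasterAdminService.newBlockingStub(channel);
//   MasterAdminProtos.IsCatalogJanitorEnabledRequest request =
//       MasterAdminProtos.IsCatalogJanitorEnabledRequest.newBuilder().build();
//   MasterAdminProtos.IsCatalogJanitorEnabledResponse response =
//       stub.isCatalogJanitorEnabled(controller, request);
//   boolean janitorEnabled = response.getValue();
//
// Each BlockingInterface call may throw com.google.protobuf.ServiceException if the
// underlying RPC fails.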