// Generated by the protocol buffer compiler. DO NOT EDIT! // source: RPC.proto package org.apache.hadoop.hbase.protobuf.generated; public final class RPCProtos { private RPCProtos() {} public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } public interface UserInformationOrBuilder extends com.google.protobuf.MessageOrBuilder { // required string effectiveUser = 1; boolean hasEffectiveUser(); String getEffectiveUser(); // optional string realUser = 2; boolean hasRealUser(); String getRealUser(); } public static final class UserInformation extends com.google.protobuf.GeneratedMessage implements UserInformationOrBuilder { // Use UserInformation.newBuilder() to construct. private UserInformation(Builder builder) { super(builder); } private UserInformation(boolean noInit) {} private static final UserInformation defaultInstance; public static UserInformation getDefaultInstance() { return defaultInstance; } public UserInformation getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_UserInformation_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_UserInformation_fieldAccessorTable; } private int bitField0_; // required string effectiveUser = 1; public static final int EFFECTIVEUSER_FIELD_NUMBER = 1; private java.lang.Object effectiveUser_; public boolean hasEffectiveUser() { return ((bitField0_ & 0x00000001) == 0x00000001); } public String getEffectiveUser() { java.lang.Object ref = effectiveUser_; if (ref instanceof String) { return (String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; String s = bs.toStringUtf8(); if (com.google.protobuf.Internal.isValidUtf8(bs)) { effectiveUser_ = s; } return s; } } private com.google.protobuf.ByteString getEffectiveUserBytes() { java.lang.Object ref = effectiveUser_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((String) ref); effectiveUser_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } // optional string realUser = 2; public static final int REALUSER_FIELD_NUMBER = 2; private java.lang.Object realUser_; public boolean hasRealUser() { return ((bitField0_ & 0x00000002) == 0x00000002); } public String getRealUser() { java.lang.Object ref = realUser_; if (ref instanceof String) { return (String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; String s = bs.toStringUtf8(); if (com.google.protobuf.Internal.isValidUtf8(bs)) { realUser_ = s; } return s; } } private com.google.protobuf.ByteString getRealUserBytes() { java.lang.Object ref = realUser_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((String) ref); realUser_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private void initFields() { effectiveUser_ = ""; realUser_ = ""; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasEffectiveUser()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) 
throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, getEffectiveUserBytes()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, getRealUserBytes()); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, getEffectiveUserBytes()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, getRealUserBytes()); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation) obj; boolean result = true; result = result && (hasEffectiveUser() == other.hasEffectiveUser()); if (hasEffectiveUser()) { result = result && getEffectiveUser() .equals(other.getEffectiveUser()); } result = result && (hasRealUser() == other.hasRealUser()); if (hasRealUser()) { result = result && getRealUser() .equals(other.getRealUser()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasEffectiveUser()) { hash = (37 * hash) + EFFECTIVEUSER_FIELD_NUMBER; hash = (53 * hash) + getEffectiveUser().hashCode(); } if (hasRealUser()) { hash = (37 * hash) + REALUSER_FIELD_NUMBER; hash = (53 * hash) + getRealUser().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_UserInformation_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_UserInformation_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); effectiveUser_ = ""; bitField0_ = (bitField0_ & ~0x00000001); realUser_ = ""; bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDescriptor(); } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation getDefaultInstanceForType() { return 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation build() { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation buildPartial() { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.effectiveUser_ = effectiveUser_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.realUser_ = realUser_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation other) { if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance()) return this; if (other.hasEffectiveUser()) { setEffectiveUser(other.getEffectiveUser()); } if (other.hasRealUser()) { setRealUser(other.getRealUser()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasEffectiveUser()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } case 10: { bitField0_ |= 0x00000001; effectiveUser_ = input.readBytes(); break; } case 18: { bitField0_ |= 0x00000002; realUser_ = input.readBytes(); break; } } } } private int bitField0_; // required string effectiveUser = 1; private java.lang.Object effectiveUser_ = ""; public boolean hasEffectiveUser() { return ((bitField0_ & 0x00000001) == 0x00000001); } public String getEffectiveUser() { java.lang.Object ref = effectiveUser_; if (!(ref instanceof String)) { String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); effectiveUser_ = s; return s; } else { return (String) ref; } } public Builder setEffectiveUser(String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; effectiveUser_ = value; onChanged(); return 
this; } public Builder clearEffectiveUser() { bitField0_ = (bitField0_ & ~0x00000001); effectiveUser_ = getDefaultInstance().getEffectiveUser(); onChanged(); return this; } void setEffectiveUser(com.google.protobuf.ByteString value) { bitField0_ |= 0x00000001; effectiveUser_ = value; onChanged(); } // optional string realUser = 2; private java.lang.Object realUser_ = ""; public boolean hasRealUser() { return ((bitField0_ & 0x00000002) == 0x00000002); } public String getRealUser() { java.lang.Object ref = realUser_; if (!(ref instanceof String)) { String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); realUser_ = s; return s; } else { return (String) ref; } } public Builder setRealUser(String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; realUser_ = value; onChanged(); return this; } public Builder clearRealUser() { bitField0_ = (bitField0_ & ~0x00000002); realUser_ = getDefaultInstance().getRealUser(); onChanged(); return this; } void setRealUser(com.google.protobuf.ByteString value) { bitField0_ |= 0x00000002; realUser_ = value; onChanged(); } // @@protoc_insertion_point(builder_scope:UserInformation) } static { defaultInstance = new UserInformation(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:UserInformation) } public interface ConnectionHeaderOrBuilder extends com.google.protobuf.MessageOrBuilder { // optional .UserInformation userInfo = 1; boolean hasUserInfo(); org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation getUserInfo(); org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder(); // optional string protocol = 2 [default = "org.apache.hadoop.hbase.client.ClientProtocol"]; boolean hasProtocol(); String getProtocol(); } public static final class ConnectionHeader extends com.google.protobuf.GeneratedMessage implements ConnectionHeaderOrBuilder { // Use ConnectionHeader.newBuilder() to construct. 
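    // Illustrative usage sketch (not part of the generated API; variable
    // names and values below are hypothetical). A client would typically
    // populate this header through the generated Builder before opening a
    // connection, nesting a UserInformation message built the same way:
    //
    //   UserInformation user = UserInformation.newBuilder()
    //       .setEffectiveUser("hbase_client")   // required field
    //       .setRealUser("proxy_admin")         // optional field
    //       .build();
    //   ConnectionHeader header = ConnectionHeader.newBuilder()
    //       .setUserInfo(user)
    //       .setProtocol("org.apache.hadoop.hbase.client.ClientProtocol")
    //       .build();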
private ConnectionHeader(Builder builder) { super(builder); } private ConnectionHeader(boolean noInit) {} private static final ConnectionHeader defaultInstance; public static ConnectionHeader getDefaultInstance() { return defaultInstance; } public ConnectionHeader getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ConnectionHeader_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ConnectionHeader_fieldAccessorTable; } private int bitField0_; // optional .UserInformation userInfo = 1; public static final int USERINFO_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation userInfo_; public boolean hasUserInfo() { return ((bitField0_ & 0x00000001) == 0x00000001); } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation getUserInfo() { return userInfo_; } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder() { return userInfo_; } // optional string protocol = 2 [default = "org.apache.hadoop.hbase.client.ClientProtocol"]; public static final int PROTOCOL_FIELD_NUMBER = 2; private java.lang.Object protocol_; public boolean hasProtocol() { return ((bitField0_ & 0x00000002) == 0x00000002); } public String getProtocol() { java.lang.Object ref = protocol_; if (ref instanceof String) { return (String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; String s = bs.toStringUtf8(); if (com.google.protobuf.Internal.isValidUtf8(bs)) { protocol_ = s; } return s; } } private com.google.protobuf.ByteString getProtocolBytes() { java.lang.Object ref = protocol_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((String) ref); protocol_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private void initFields() { userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); protocol_ = "org.apache.hadoop.hbase.client.ClientProtocol"; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (hasUserInfo()) { if (!getUserInfo().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, userInfo_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, getProtocolBytes()); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, userInfo_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, getProtocolBytes()); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; 
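    // Wire round-trip sketch (illustrative only; assumes the toByteArray()
    // helper inherited from the protobuf runtime). parseFrom() rejects a
    // header whose nested UserInformation is missing its required
    // effectiveUser field, per isInitialized() above.
    //
    //   byte[] wire = header.toByteArray();
    //   ConnectionHeader decoded = ConnectionHeader.parseFrom(wire);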
@java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader) obj; boolean result = true; result = result && (hasUserInfo() == other.hasUserInfo()); if (hasUserInfo()) { result = result && getUserInfo() .equals(other.getUserInfo()); } result = result && (hasProtocol() == other.hasProtocol()); if (hasProtocol()) { result = result && getProtocol() .equals(other.getProtocol()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasUserInfo()) { hash = (37 * hash) + USERINFO_FIELD_NUMBER; hash = (53 * hash) + getUserInfo().hashCode(); } if (hasProtocol()) { hash = (37 * hash) + PROTOCOL_FIELD_NUMBER; hash = (53 * hash) + getProtocol().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { 
return null; } } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ConnectionHeader_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ConnectionHeader_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getUserInfoFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (userInfoBuilder_ == null) { userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); } else { userInfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); protocol_ = "org.apache.hadoop.hbase.client.ClientProtocol"; bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader.getDescriptor(); } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader build() { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( 
result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader buildPartial() { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (userInfoBuilder_ == null) { result.userInfo_ = userInfo_; } else { result.userInfo_ = userInfoBuilder_.build(); } if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.protocol_ = protocol_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader other) { if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader.getDefaultInstance()) return this; if (other.hasUserInfo()) { mergeUserInfo(other.getUserInfo()); } if (other.hasProtocol()) { setProtocol(other.getProtocol()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (hasUserInfo()) { if (!getUserInfo().isInitialized()) { return false; } } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } case 10: { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.newBuilder(); if (hasUserInfo()) { subBuilder.mergeFrom(getUserInfo()); } input.readMessage(subBuilder, extensionRegistry); setUserInfo(subBuilder.buildPartial()); break; } case 18: { bitField0_ |= 0x00000002; protocol_ = input.readBytes(); break; } } } } private int bitField0_; // optional .UserInformation userInfo = 1; private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder> userInfoBuilder_; public boolean hasUserInfo() { return ((bitField0_ & 0x00000001) == 0x00000001); } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation getUserInfo() { if (userInfoBuilder_ == null) { return userInfo_; } else { return userInfoBuilder_.getMessage(); } } public Builder setUserInfo(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation value) { if 
(userInfoBuilder_ == null) { if (value == null) { throw new NullPointerException(); } userInfo_ = value; onChanged(); } else { userInfoBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } public Builder setUserInfo( org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder builderForValue) { if (userInfoBuilder_ == null) { userInfo_ = builderForValue.build(); onChanged(); } else { userInfoBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } public Builder mergeUserInfo(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation value) { if (userInfoBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && userInfo_ != org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance()) { userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.newBuilder(userInfo_).mergeFrom(value).buildPartial(); } else { userInfo_ = value; } onChanged(); } else { userInfoBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } public Builder clearUserInfo() { if (userInfoBuilder_ == null) { userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); onChanged(); } else { userInfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder getUserInfoBuilder() { bitField0_ |= 0x00000001; onChanged(); return getUserInfoFieldBuilder().getBuilder(); } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder() { if (userInfoBuilder_ != null) { return userInfoBuilder_.getMessageOrBuilder(); } else { return userInfo_; } } private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder> getUserInfoFieldBuilder() { if (userInfoBuilder_ == null) { userInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder>( userInfo_, getParentForChildren(), isClean()); userInfo_ = null; } return userInfoBuilder_; } // optional string protocol = 2 [default = "org.apache.hadoop.hbase.client.ClientProtocol"]; private java.lang.Object protocol_ = "org.apache.hadoop.hbase.client.ClientProtocol"; public boolean hasProtocol() { return ((bitField0_ & 0x00000002) == 0x00000002); } public String getProtocol() { java.lang.Object ref = protocol_; if (!(ref instanceof String)) { String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); protocol_ = s; return s; } else { return (String) ref; } } public Builder setProtocol(String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; protocol_ = value; onChanged(); return this; } public Builder clearProtocol() { bitField0_ = (bitField0_ & ~0x00000002); protocol_ = getDefaultInstance().getProtocol(); onChanged(); return this; } void setProtocol(com.google.protobuf.ByteString value) { bitField0_ |= 0x00000002; protocol_ = value; onChanged(); } // @@protoc_insertion_point(builder_scope:ConnectionHeader) } static { defaultInstance = new ConnectionHeader(true); 
defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:ConnectionHeader) } public interface RpcRequestHeaderOrBuilder extends com.google.protobuf.MessageOrBuilder { // required uint32 callId = 1; boolean hasCallId(); int getCallId(); // optional .RPCTInfo tinfo = 2; boolean hasTinfo(); org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo getTinfo(); org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder getTinfoOrBuilder(); } public static final class RpcRequestHeader extends com.google.protobuf.GeneratedMessage implements RpcRequestHeaderOrBuilder { // Use RpcRequestHeader.newBuilder() to construct. private RpcRequestHeader(Builder builder) { super(builder); } private RpcRequestHeader(boolean noInit) {} private static final RpcRequestHeader defaultInstance; public static RpcRequestHeader getDefaultInstance() { return defaultInstance; } public RpcRequestHeader getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcRequestHeader_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcRequestHeader_fieldAccessorTable; } private int bitField0_; // required uint32 callId = 1; public static final int CALLID_FIELD_NUMBER = 1; private int callId_; public boolean hasCallId() { return ((bitField0_ & 0x00000001) == 0x00000001); } public int getCallId() { return callId_; } // optional .RPCTInfo tinfo = 2; public static final int TINFO_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo tinfo_; public boolean hasTinfo() { return ((bitField0_ & 0x00000002) == 0x00000002); } public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo getTinfo() { return tinfo_; } public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder getTinfoOrBuilder() { return tinfo_; } private void initFields() { callId_ = 0; tinfo_ = org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasCallId()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt32(1, callId_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeMessage(2, tinfo_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeUInt32Size(1, callId_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, tinfo_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) 
{ if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader) obj; boolean result = true; result = result && (hasCallId() == other.hasCallId()); if (hasCallId()) { result = result && (getCallId() == other.getCallId()); } result = result && (hasTinfo() == other.hasTinfo()); if (hasTinfo()) { result = result && getTinfo() .equals(other.getTinfo()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasCallId()) { hash = (37 * hash) + CALLID_FIELD_NUMBER; hash = (53 * hash) + getCallId(); } if (hasTinfo()) { hash = (37 * hash) + TINFO_FIELD_NUMBER; hash = (53 * hash) + getTinfo().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeaderOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcRequestHeader_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcRequestHeader_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getTinfoFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); callId_ = 0; bitField0_ = (bitField0_ & ~0x00000001); if (tinfoBuilder_ == null) { tinfo_ = org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance(); } else { tinfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader.getDescriptor(); } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader build() { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader buildPartial() { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader(this); int from_bitField0_ = bitField0_; int 
to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.callId_ = callId_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } if (tinfoBuilder_ == null) { result.tinfo_ = tinfo_; } else { result.tinfo_ = tinfoBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader other) { if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader.getDefaultInstance()) return this; if (other.hasCallId()) { setCallId(other.getCallId()); } if (other.hasTinfo()) { mergeTinfo(other.getTinfo()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasCallId()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } case 8: { bitField0_ |= 0x00000001; callId_ = input.readUInt32(); break; } case 18: { org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.newBuilder(); if (hasTinfo()) { subBuilder.mergeFrom(getTinfo()); } input.readMessage(subBuilder, extensionRegistry); setTinfo(subBuilder.buildPartial()); break; } } } } private int bitField0_; // required uint32 callId = 1; private int callId_ ; public boolean hasCallId() { return ((bitField0_ & 0x00000001) == 0x00000001); } public int getCallId() { return callId_; } public Builder setCallId(int value) { bitField0_ |= 0x00000001; callId_ = value; onChanged(); return this; } public Builder clearCallId() { bitField0_ = (bitField0_ & ~0x00000001); callId_ = 0; onChanged(); return this; } // optional .RPCTInfo tinfo = 2; private org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo tinfo_ = org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder> tinfoBuilder_; public boolean hasTinfo() { return ((bitField0_ & 0x00000002) == 0x00000002); } public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo getTinfo() { if (tinfoBuilder_ == null) { return tinfo_; } else { return tinfoBuilder_.getMessage(); } } public Builder setTinfo(org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo value) { if (tinfoBuilder_ == null) { if (value == null) { throw new NullPointerException(); } tinfo_ = value; onChanged(); } else { tinfoBuilder_.setMessage(value); } 
bitField0_ |= 0x00000002; return this; } public Builder setTinfo( org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder builderForValue) { if (tinfoBuilder_ == null) { tinfo_ = builderForValue.build(); onChanged(); } else { tinfoBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } public Builder mergeTinfo(org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo value) { if (tinfoBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && tinfo_ != org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance()) { tinfo_ = org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.newBuilder(tinfo_).mergeFrom(value).buildPartial(); } else { tinfo_ = value; } onChanged(); } else { tinfoBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } public Builder clearTinfo() { if (tinfoBuilder_ == null) { tinfo_ = org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance(); onChanged(); } else { tinfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder getTinfoBuilder() { bitField0_ |= 0x00000002; onChanged(); return getTinfoFieldBuilder().getBuilder(); } public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder getTinfoOrBuilder() { if (tinfoBuilder_ != null) { return tinfoBuilder_.getMessageOrBuilder(); } else { return tinfo_; } } private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder> getTinfoFieldBuilder() { if (tinfoBuilder_ == null) { tinfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder>( tinfo_, getParentForChildren(), isClean()); tinfo_ = null; } return tinfoBuilder_; } // @@protoc_insertion_point(builder_scope:RpcRequestHeader) } static { defaultInstance = new RpcRequestHeader(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:RpcRequestHeader) } public interface RpcRequestBodyOrBuilder extends com.google.protobuf.MessageOrBuilder { // required string methodName = 1; boolean hasMethodName(); String getMethodName(); // optional uint64 clientProtocolVersion = 2; boolean hasClientProtocolVersion(); long getClientProtocolVersion(); // optional bytes request = 3; boolean hasRequest(); com.google.protobuf.ByteString getRequest(); // optional string requestClassName = 4; boolean hasRequestClassName(); String getRequestClassName(); } public static final class RpcRequestBody extends com.google.protobuf.GeneratedMessage implements RpcRequestBodyOrBuilder { // Use RpcRequestBody.newBuilder() to construct. 
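    // Illustrative usage sketch (hypothetical method name and values,
    // assuming the usual generated Builder setters for the fields declared
    // below). The request header carries the call id, while this body names
    // the invoked method and wraps the serialized request message:
    //
    //   RpcRequestHeader rpcHeader = RpcRequestHeader.newBuilder()
    //       .setCallId(1)            // required
    //       .build();
    //   RpcRequestBody body = RpcRequestBody.newBuilder()
    //       .setMethodName("get")    // required
    //       .setClientProtocolVersion(1L)
    //       .setRequest(com.google.protobuf.ByteString.copyFromUtf8("..."))
    //       .setRequestClassName("GetRequest")
    //       .build();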
private RpcRequestBody(Builder builder) { super(builder); } private RpcRequestBody(boolean noInit) {} private static final RpcRequestBody defaultInstance; public static RpcRequestBody getDefaultInstance() { return defaultInstance; } public RpcRequestBody getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcRequestBody_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcRequestBody_fieldAccessorTable; } private int bitField0_; // required string methodName = 1; public static final int METHODNAME_FIELD_NUMBER = 1; private java.lang.Object methodName_; public boolean hasMethodName() { return ((bitField0_ & 0x00000001) == 0x00000001); } public String getMethodName() { java.lang.Object ref = methodName_; if (ref instanceof String) { return (String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; String s = bs.toStringUtf8(); if (com.google.protobuf.Internal.isValidUtf8(bs)) { methodName_ = s; } return s; } } private com.google.protobuf.ByteString getMethodNameBytes() { java.lang.Object ref = methodName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((String) ref); methodName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } // optional uint64 clientProtocolVersion = 2; public static final int CLIENTPROTOCOLVERSION_FIELD_NUMBER = 2; private long clientProtocolVersion_; public boolean hasClientProtocolVersion() { return ((bitField0_ & 0x00000002) == 0x00000002); } public long getClientProtocolVersion() { return clientProtocolVersion_; } // optional bytes request = 3; public static final int REQUEST_FIELD_NUMBER = 3; private com.google.protobuf.ByteString request_; public boolean hasRequest() { return ((bitField0_ & 0x00000004) == 0x00000004); } public com.google.protobuf.ByteString getRequest() { return request_; } // optional string requestClassName = 4; public static final int REQUESTCLASSNAME_FIELD_NUMBER = 4; private java.lang.Object requestClassName_; public boolean hasRequestClassName() { return ((bitField0_ & 0x00000008) == 0x00000008); } public String getRequestClassName() { java.lang.Object ref = requestClassName_; if (ref instanceof String) { return (String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; String s = bs.toStringUtf8(); if (com.google.protobuf.Internal.isValidUtf8(bs)) { requestClassName_ = s; } return s; } } private com.google.protobuf.ByteString getRequestClassNameBytes() { java.lang.Object ref = requestClassName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((String) ref); requestClassName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private void initFields() { methodName_ = ""; clientProtocolVersion_ = 0L; request_ = com.google.protobuf.ByteString.EMPTY; requestClassName_ = ""; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasMethodName()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream 
output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, getMethodNameBytes()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeUInt64(2, clientProtocolVersion_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBytes(3, request_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeBytes(4, getRequestClassNameBytes()); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, getMethodNameBytes()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(2, clientProtocolVersion_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(3, request_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(4, getRequestClassNameBytes()); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody) obj; boolean result = true; result = result && (hasMethodName() == other.hasMethodName()); if (hasMethodName()) { result = result && getMethodName() .equals(other.getMethodName()); } result = result && (hasClientProtocolVersion() == other.hasClientProtocolVersion()); if (hasClientProtocolVersion()) { result = result && (getClientProtocolVersion() == other.getClientProtocolVersion()); } result = result && (hasRequest() == other.hasRequest()); if (hasRequest()) { result = result && getRequest() .equals(other.getRequest()); } result = result && (hasRequestClassName() == other.hasRequestClassName()); if (hasRequestClassName()) { result = result && getRequestClassName() .equals(other.getRequestClassName()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasMethodName()) { hash = (37 * hash) + METHODNAME_FIELD_NUMBER; hash = (53 * hash) + getMethodName().hashCode(); } if (hasClientProtocolVersion()) { hash = (37 * hash) + CLIENTPROTOCOLVERSION_FIELD_NUMBER; hash = (53 * hash) + hashLong(getClientProtocolVersion()); } if (hasRequest()) { hash = (37 * hash) + REQUEST_FIELD_NUMBER; hash = (53 * hash) + getRequest().hashCode(); } if (hasRequestClassName()) { hash = (37 * hash) + REQUESTCLASSNAME_FIELD_NUMBER; hash = (53 * hash) + getRequestClassName().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return 
newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBodyOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcRequestBody_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcRequestBody_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); methodName_ = ""; bitField0_ = (bitField0_ & ~0x00000001); clientProtocolVersion_ = 0L; bitField0_ = (bitField0_ & ~0x00000002); request_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000004); requestClassName_ = ""; bitField0_ = (bitField0_ & ~0x00000008); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody.getDescriptor(); } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody build() { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody buildPartial() { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.methodName_ = methodName_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.clientProtocolVersion_ = clientProtocolVersion_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.request_ = request_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; } result.requestClassName_ = requestClassName_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody other) { if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody.getDefaultInstance()) return this; if (other.hasMethodName()) { setMethodName(other.getMethodName()); } if (other.hasClientProtocolVersion()) { setClientProtocolVersion(other.getClientProtocolVersion()); } if (other.hasRequest()) { 
setRequest(other.getRequest()); } if (other.hasRequestClassName()) { setRequestClassName(other.getRequestClassName()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasMethodName()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } case 10: { bitField0_ |= 0x00000001; methodName_ = input.readBytes(); break; } case 16: { bitField0_ |= 0x00000002; clientProtocolVersion_ = input.readUInt64(); break; } case 26: { bitField0_ |= 0x00000004; request_ = input.readBytes(); break; } case 34: { bitField0_ |= 0x00000008; requestClassName_ = input.readBytes(); break; } } } } private int bitField0_; // required string methodName = 1; private java.lang.Object methodName_ = ""; public boolean hasMethodName() { return ((bitField0_ & 0x00000001) == 0x00000001); } public String getMethodName() { java.lang.Object ref = methodName_; if (!(ref instanceof String)) { String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); methodName_ = s; return s; } else { return (String) ref; } } public Builder setMethodName(String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; methodName_ = value; onChanged(); return this; } public Builder clearMethodName() { bitField0_ = (bitField0_ & ~0x00000001); methodName_ = getDefaultInstance().getMethodName(); onChanged(); return this; } void setMethodName(com.google.protobuf.ByteString value) { bitField0_ |= 0x00000001; methodName_ = value; onChanged(); } // optional uint64 clientProtocolVersion = 2; private long clientProtocolVersion_ ; public boolean hasClientProtocolVersion() { return ((bitField0_ & 0x00000002) == 0x00000002); } public long getClientProtocolVersion() { return clientProtocolVersion_; } public Builder setClientProtocolVersion(long value) { bitField0_ |= 0x00000002; clientProtocolVersion_ = value; onChanged(); return this; } public Builder clearClientProtocolVersion() { bitField0_ = (bitField0_ & ~0x00000002); clientProtocolVersion_ = 0L; onChanged(); return this; } // optional bytes request = 3; private com.google.protobuf.ByteString request_ = com.google.protobuf.ByteString.EMPTY; public boolean hasRequest() { return ((bitField0_ & 0x00000004) == 0x00000004); } public com.google.protobuf.ByteString getRequest() { return request_; } public Builder setRequest(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; request_ = value; onChanged(); return this; } public Builder clearRequest() { bitField0_ = (bitField0_ & ~0x00000004); request_ = getDefaultInstance().getRequest(); onChanged(); return this; } // optional string requestClassName = 4; private java.lang.Object requestClassName_ = ""; public boolean hasRequestClassName() { return ((bitField0_ & 0x00000008) == 0x00000008); } public String getRequestClassName() { java.lang.Object ref = requestClassName_; if (!(ref instanceof String)) { String s = 
((com.google.protobuf.ByteString) ref).toStringUtf8(); requestClassName_ = s; return s; } else { return (String) ref; } } public Builder setRequestClassName(String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000008; requestClassName_ = value; onChanged(); return this; } public Builder clearRequestClassName() { bitField0_ = (bitField0_ & ~0x00000008); requestClassName_ = getDefaultInstance().getRequestClassName(); onChanged(); return this; } void setRequestClassName(com.google.protobuf.ByteString value) { bitField0_ |= 0x00000008; requestClassName_ = value; onChanged(); } // @@protoc_insertion_point(builder_scope:RpcRequestBody) } static { defaultInstance = new RpcRequestBody(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:RpcRequestBody) } public interface RpcResponseHeaderOrBuilder extends com.google.protobuf.MessageOrBuilder { // required uint32 callId = 1; boolean hasCallId(); int getCallId(); // required .RpcResponseHeader.Status status = 2; boolean hasStatus(); org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status getStatus(); } public static final class RpcResponseHeader extends com.google.protobuf.GeneratedMessage implements RpcResponseHeaderOrBuilder { // Use RpcResponseHeader.newBuilder() to construct. private RpcResponseHeader(Builder builder) { super(builder); } private RpcResponseHeader(boolean noInit) {} private static final RpcResponseHeader defaultInstance; public static RpcResponseHeader getDefaultInstance() { return defaultInstance; } public RpcResponseHeader getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseHeader_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseHeader_fieldAccessorTable; } public enum Status implements com.google.protobuf.ProtocolMessageEnum { SUCCESS(0, 0), ERROR(1, 1), FATAL(2, 2), ; public static final int SUCCESS_VALUE = 0; public static final int ERROR_VALUE = 1; public static final int FATAL_VALUE = 2; public final int getNumber() { return value; } public static Status valueOf(int value) { switch (value) { case 0: return SUCCESS; case 1: return ERROR; case 2: return FATAL; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<Status> internalGetValueMap() { return internalValueMap; } private static com.google.protobuf.Internal.EnumLiteMap<Status> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<Status>() { public Status findValueByNumber(int number) { return Status.valueOf(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(index); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.getDescriptor().getEnumTypes().get(0); } private static final Status[] VALUES = { SUCCESS, ERROR, FATAL, }; public static Status valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is 
not for this type."); } return VALUES[desc.getIndex()]; } private final int index; private final int value; private Status(int index, int value) { this.index = index; this.value = value; } // @@protoc_insertion_point(enum_scope:RpcResponseHeader.Status) } private int bitField0_; // required uint32 callId = 1; public static final int CALLID_FIELD_NUMBER = 1; private int callId_; public boolean hasCallId() { return ((bitField0_ & 0x00000001) == 0x00000001); } public int getCallId() { return callId_; } // required .RpcResponseHeader.Status status = 2; public static final int STATUS_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status status_; public boolean hasStatus() { return ((bitField0_ & 0x00000002) == 0x00000002); } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status getStatus() { return status_; } private void initFields() { callId_ = 0; status_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status.SUCCESS; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasCallId()) { memoizedIsInitialized = 0; return false; } if (!hasStatus()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt32(1, callId_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeEnum(2, status_.getNumber()); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeUInt32Size(1, callId_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(2, status_.getNumber()); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader) obj; boolean result = true; result = result && (hasCallId() == other.hasCallId()); if (hasCallId()) { result = result && (getCallId() == other.getCallId()); } result = result && (hasStatus() == other.hasStatus()); if (hasStatus()) { result = result && (getStatus() == other.getStatus()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasCallId()) { hash = (37 * hash) + CALLID_FIELD_NUMBER; hash = (53 * hash) + getCallId(); } if (hasStatus()) { hash = (37 * hash) + STATUS_FIELD_NUMBER; hash = (53 * hash) + hashEnum(getStatus()); } hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } 
public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeaderOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor 
getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseHeader_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseHeader_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); callId_ = 0; bitField0_ = (bitField0_ & ~0x00000001); status_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status.SUCCESS; bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.getDescriptor(); } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader build() { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader buildPartial() { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.callId_ = callId_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.status_ = status_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader other) { if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.getDefaultInstance()) return this; if (other.hasCallId()) { setCallId(other.getCallId()); } if (other.hasStatus()) { setStatus(other.getStatus()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasCallId()) { return false; } if (!hasStatus()) { return false; } 
return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } case 8: { bitField0_ |= 0x00000001; callId_ = input.readUInt32(); break; } case 16: { int rawValue = input.readEnum(); org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status value = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(2, rawValue); } else { bitField0_ |= 0x00000002; status_ = value; } break; } } } } private int bitField0_; // required uint32 callId = 1; private int callId_ ; public boolean hasCallId() { return ((bitField0_ & 0x00000001) == 0x00000001); } public int getCallId() { return callId_; } public Builder setCallId(int value) { bitField0_ |= 0x00000001; callId_ = value; onChanged(); return this; } public Builder clearCallId() { bitField0_ = (bitField0_ & ~0x00000001); callId_ = 0; onChanged(); return this; } // required .RpcResponseHeader.Status status = 2; private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status status_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status.SUCCESS; public boolean hasStatus() { return ((bitField0_ & 0x00000002) == 0x00000002); } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status getStatus() { return status_; } public Builder setStatus(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; status_ = value; onChanged(); return this; } public Builder clearStatus() { bitField0_ = (bitField0_ & ~0x00000002); status_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status.SUCCESS; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:RpcResponseHeader) } static { defaultInstance = new RpcResponseHeader(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:RpcResponseHeader) } public interface RpcResponseBodyOrBuilder extends com.google.protobuf.MessageOrBuilder { // optional bytes response = 1; boolean hasResponse(); com.google.protobuf.ByteString getResponse(); } public static final class RpcResponseBody extends com.google.protobuf.GeneratedMessage implements RpcResponseBodyOrBuilder { // Use RpcResponseBody.newBuilder() to construct. 
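// Illustrative sketch (not generated output): the optional 'response' field is an opaque
// ByteString, so a concrete result message would be serialized separately and wrapped here.
// 'someResultMessage' is a placeholder for whatever protobuf message the caller uses; it is
// not defined in this file.
//
//   RpcResponseBody body = RpcResponseBody.newBuilder()
//       .setResponse(someResultMessage.toByteString())
//       .build();
//   byte[] wire = body.toByteArray();
//   RpcResponseBody parsed = RpcResponseBody.parseFrom(wire);
//   com.google.protobuf.ByteString payload =
//       parsed.hasResponse() ? parsed.getResponse() : com.google.protobuf.ByteString.EMPTY;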
private RpcResponseBody(Builder builder) { super(builder); } private RpcResponseBody(boolean noInit) {} private static final RpcResponseBody defaultInstance; public static RpcResponseBody getDefaultInstance() { return defaultInstance; } public RpcResponseBody getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseBody_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseBody_fieldAccessorTable; } private int bitField0_; // optional bytes response = 1; public static final int RESPONSE_FIELD_NUMBER = 1; private com.google.protobuf.ByteString response_; public boolean hasResponse() { return ((bitField0_ & 0x00000001) == 0x00000001); } public com.google.protobuf.ByteString getResponse() { return response_; } private void initFields() { response_ = com.google.protobuf.ByteString.EMPTY; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, response_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, response_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody) obj; boolean result = true; result = result && (hasResponse() == other.hasResponse()); if (hasResponse()) { result = result && getResponse() .equals(other.getResponse()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasResponse()) { hash = (37 * hash) + RESPONSE_FIELD_NUMBER; hash = (53 * hash) + getResponse().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { 
return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBodyOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseBody_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseBody_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent 
parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); response_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody.getDescriptor(); } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody build() { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody buildPartial() { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.response_ = response_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody other) { if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody.getDefaultInstance()) return this; if (other.hasResponse()) { setResponse(other.getResponse()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } case 10: { bitField0_ |= 0x00000001; response_ = input.readBytes(); break; } } } } private int bitField0_; // optional bytes response = 1; private com.google.protobuf.ByteString response_ = com.google.protobuf.ByteString.EMPTY; public boolean hasResponse() { return ((bitField0_ 
& 0x00000001) == 0x00000001); } public com.google.protobuf.ByteString getResponse() { return response_; } public Builder setResponse(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; response_ = value; onChanged(); return this; } public Builder clearResponse() { bitField0_ = (bitField0_ & ~0x00000001); response_ = getDefaultInstance().getResponse(); onChanged(); return this; } // @@protoc_insertion_point(builder_scope:RpcResponseBody) } static { defaultInstance = new RpcResponseBody(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:RpcResponseBody) } public interface RpcExceptionOrBuilder extends com.google.protobuf.MessageOrBuilder { // required string exceptionName = 1; boolean hasExceptionName(); String getExceptionName(); // optional string stackTrace = 2; boolean hasStackTrace(); String getStackTrace(); } public static final class RpcException extends com.google.protobuf.GeneratedMessage implements RpcExceptionOrBuilder { // Use RpcException.newBuilder() to construct. private RpcException(Builder builder) { super(builder); } private RpcException(boolean noInit) {} private static final RpcException defaultInstance; public static RpcException getDefaultInstance() { return defaultInstance; } public RpcException getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcException_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcException_fieldAccessorTable; } private int bitField0_; // required string exceptionName = 1; public static final int EXCEPTIONNAME_FIELD_NUMBER = 1; private java.lang.Object exceptionName_; public boolean hasExceptionName() { return ((bitField0_ & 0x00000001) == 0x00000001); } public String getExceptionName() { java.lang.Object ref = exceptionName_; if (ref instanceof String) { return (String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; String s = bs.toStringUtf8(); if (com.google.protobuf.Internal.isValidUtf8(bs)) { exceptionName_ = s; } return s; } } private com.google.protobuf.ByteString getExceptionNameBytes() { java.lang.Object ref = exceptionName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((String) ref); exceptionName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } // optional string stackTrace = 2; public static final int STACKTRACE_FIELD_NUMBER = 2; private java.lang.Object stackTrace_; public boolean hasStackTrace() { return ((bitField0_ & 0x00000002) == 0x00000002); } public String getStackTrace() { java.lang.Object ref = stackTrace_; if (ref instanceof String) { return (String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; String s = bs.toStringUtf8(); if (com.google.protobuf.Internal.isValidUtf8(bs)) { stackTrace_ = s; } return s; } } private com.google.protobuf.ByteString getStackTraceBytes() { java.lang.Object ref = stackTrace_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((String) ref); stackTrace_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private void initFields() { exceptionName_ = ""; 
stackTrace_ = ""; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasExceptionName()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, getExceptionNameBytes()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, getStackTraceBytes()); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, getExceptionNameBytes()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, getStackTraceBytes()); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException) obj; boolean result = true; result = result && (hasExceptionName() == other.hasExceptionName()); if (hasExceptionName()) { result = result && getExceptionName() .equals(other.getExceptionName()); } result = result && (hasStackTrace() == other.hasStackTrace()); if (hasStackTrace()) { result = result && getStackTrace() .equals(other.getStackTrace()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasExceptionName()) { hash = (37 * hash) + EXCEPTIONNAME_FIELD_NUMBER; hash = (53 * hash) + getExceptionName().hashCode(); } if (hasStackTrace()) { hash = (37 * hash) + STACKTRACE_FIELD_NUMBER; hash = (53 * hash) + getStackTrace().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcExceptionOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcException_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcException_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); exceptionName_ = ""; bitField0_ = (bitField0_ & ~0x00000001); stackTrace_ = ""; bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public 
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException.getDescriptor(); } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException build() { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException buildPartial() { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.exceptionName_ = exceptionName_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.stackTrace_ = stackTrace_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException other) { if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException.getDefaultInstance()) return this; if (other.hasExceptionName()) { setExceptionName(other.getExceptionName()); } if (other.hasStackTrace()) { setStackTrace(other.getStackTrace()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasExceptionName()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } case 10: { bitField0_ |= 0x00000001; exceptionName_ = input.readBytes(); break; } case 18: { bitField0_ |= 0x00000002; stackTrace_ = input.readBytes(); break; } } } } private int bitField0_; // required string exceptionName = 1; private java.lang.Object exceptionName_ = ""; public boolean hasExceptionName() { return ((bitField0_ & 0x00000001) == 0x00000001); } public String getExceptionName() { java.lang.Object ref = exceptionName_; if (!(ref instanceof String)) { String s = ((com.google.protobuf.ByteString) 
ref).toStringUtf8(); exceptionName_ = s; return s; } else { return (String) ref; } } public Builder setExceptionName(String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; exceptionName_ = value; onChanged(); return this; } public Builder clearExceptionName() { bitField0_ = (bitField0_ & ~0x00000001); exceptionName_ = getDefaultInstance().getExceptionName(); onChanged(); return this; } void setExceptionName(com.google.protobuf.ByteString value) { bitField0_ |= 0x00000001; exceptionName_ = value; onChanged(); } // optional string stackTrace = 2; private java.lang.Object stackTrace_ = ""; public boolean hasStackTrace() { return ((bitField0_ & 0x00000002) == 0x00000002); } public String getStackTrace() { java.lang.Object ref = stackTrace_; if (!(ref instanceof String)) { String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); stackTrace_ = s; return s; } else { return (String) ref; } } public Builder setStackTrace(String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; stackTrace_ = value; onChanged(); return this; } public Builder clearStackTrace() { bitField0_ = (bitField0_ & ~0x00000002); stackTrace_ = getDefaultInstance().getStackTrace(); onChanged(); return this; } void setStackTrace(com.google.protobuf.ByteString value) { bitField0_ |= 0x00000002; stackTrace_ = value; onChanged(); } // @@protoc_insertion_point(builder_scope:RpcException) } static { defaultInstance = new RpcException(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:RpcException) } private static com.google.protobuf.Descriptors.Descriptor internal_static_UserInformation_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_UserInformation_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_ConnectionHeader_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_ConnectionHeader_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_RpcRequestHeader_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_RpcRequestHeader_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_RpcRequestBody_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_RpcRequestBody_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_RpcResponseHeader_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_RpcResponseHeader_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_RpcResponseBody_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_RpcResponseBody_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_RpcException_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_RpcException_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n\tRPC.proto\032\rTracing.proto\":\n\017UserInform" + "ation\022\025\n\reffectiveUser\030\001 \002(\t\022\020\n\010realUser" + "\030\002 
\001(\t\"w\n\020ConnectionHeader\022\"\n\010userInfo\030\001" + " \001(\0132\020.UserInformation\022?\n\010protocol\030\002 \001(\t" + ":-org.apache.hadoop.hbase.client.ClientP" + "rotocol\"<\n\020RpcRequestHeader\022\016\n\006callId\030\001 " + "\002(\r\022\030\n\005tinfo\030\002 \001(\0132\t.RPCTInfo\"n\n\016RpcRequ" + "estBody\022\022\n\nmethodName\030\001 \002(\t\022\035\n\025clientPro" + "tocolVersion\030\002 \001(\004\022\017\n\007request\030\003 \001(\014\022\030\n\020r" + "equestClassName\030\004 \001(\t\"{\n\021RpcResponseHead", "er\022\016\n\006callId\030\001 \002(\r\022)\n\006status\030\002 \002(\0162\031.Rpc" + "ResponseHeader.Status\"+\n\006Status\022\013\n\007SUCCE" + "SS\020\000\022\t\n\005ERROR\020\001\022\t\n\005FATAL\020\002\"#\n\017RpcRespons" + "eBody\022\020\n\010response\030\001 \001(\014\"9\n\014RpcException\022" + "\025\n\rexceptionName\030\001 \002(\t\022\022\n\nstackTrace\030\002 \001" + "(\tB<\n*org.apache.hadoop.hbase.protobuf.g" + "eneratedB\tRPCProtosH\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { public com.google.protobuf.ExtensionRegistry assignDescriptors( com.google.protobuf.Descriptors.FileDescriptor root) { descriptor = root; internal_static_UserInformation_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_UserInformation_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_UserInformation_descriptor, new java.lang.String[] { "EffectiveUser", "RealUser", }, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder.class); internal_static_ConnectionHeader_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_ConnectionHeader_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ConnectionHeader_descriptor, new java.lang.String[] { "UserInfo", "Protocol", }, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader.Builder.class); internal_static_RpcRequestHeader_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_RpcRequestHeader_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RpcRequestHeader_descriptor, new java.lang.String[] { "CallId", "Tinfo", }, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader.Builder.class); internal_static_RpcRequestBody_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_RpcRequestBody_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RpcRequestBody_descriptor, new java.lang.String[] { "MethodName", "ClientProtocolVersion", "Request", "RequestClassName", }, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody.Builder.class); internal_static_RpcResponseHeader_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_RpcResponseHeader_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RpcResponseHeader_descriptor, new java.lang.String[] { "CallId", "Status", }, 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Builder.class); internal_static_RpcResponseBody_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_RpcResponseBody_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RpcResponseBody_descriptor, new java.lang.String[] { "Response", }, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody.Builder.class); internal_static_RpcException_descriptor = getDescriptor().getMessageTypes().get(6); internal_static_RpcException_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RpcException_descriptor, new java.lang.String[] { "ExceptionName", "StackTrace", }, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException.Builder.class); return null; } }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { org.apache.hadoop.hbase.protobuf.generated.Tracing.getDescriptor(), }, assigner); } // @@protoc_insertion_point(outer_class_scope) }
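// Illustrative sketch (not generated output, and not part of the DO NOT EDIT class above):
// a minimal round trip through the request and exception messages declared in RPC.proto,
// using only the generated builders and parsers. How these messages are framed on an actual
// HBase connection is outside this file; the method name and payload below are made-up values.
//
//   import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody;
//   import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException;
//
//   RpcRequestBody request = RpcRequestBody.newBuilder()
//       .setMethodName("get")                                                // required string
//       .setClientProtocolVersion(1L)                                        // optional uint64
//       .setRequest(com.google.protobuf.ByteString.copyFromUtf8("payload"))  // opaque request bytes
//       .build();                                  // build() throws if methodName were left unset
//   byte[] bytes = request.toByteArray();
//   RpcRequestBody roundTripped = RpcRequestBody.parseFrom(bytes);
//
//   RpcException failure = RpcException.newBuilder()
//       .setExceptionName("java.io.IOException")
//       .setStackTrace("java.io.IOException: example")
//       .build();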