// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: RPC.proto

package org.apache.hadoop.hbase.protobuf.generated;

public final class RPCProtos {
  private RPCProtos() {}
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  public interface UserInformationOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required string effective_user = 1;
    /**
     * <code>required string effective_user = 1;</code>
     */
    boolean hasEffectiveUser();
    /**
     * <code>required string effective_user = 1;</code>
     */
    java.lang.String getEffectiveUser();
    /**
     * <code>required string effective_user = 1;</code>
     */
    com.google.protobuf.ByteString
        getEffectiveUserBytes();

    // optional string real_user = 2;
    /**
     * <code>optional string real_user = 2;</code>
     */
    boolean hasRealUser();
    /**
     * <code>optional string real_user = 2;</code>
     */
    java.lang.String getRealUser();
    /**
     * <code>optional string real_user = 2;</code>
     */
    com.google.protobuf.ByteString
        getRealUserBytes();
  }
  /**
   * Protobuf type {@code hbase.pb.UserInformation}
   *
   * <pre>
   * User Information proto. Included in ConnectionHeader on connection setup
   * </pre>
   */
  public static final class UserInformation extends
      com.google.protobuf.GeneratedMessage
      implements UserInformationOrBuilder {
    // Use UserInformation.newBuilder() to construct.
    private UserInformation(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private UserInformation(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final UserInformation defaultInstance;
    public static UserInformation getDefaultInstance() {
      return defaultInstance;
    }

    public UserInformation getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private UserInformation(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              effectiveUser_ = input.readBytes();
              break;
            }
            case 18: {
              bitField0_ |= 0x00000002;
              realUser_ = input.readBytes();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_UserInformation_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_UserInformation_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder.class); } public static com.google.protobuf.Parser<UserInformation> PARSER = new com.google.protobuf.AbstractParser<UserInformation>() { public UserInformation parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new UserInformation(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<UserInformation> getParserForType() { return PARSER; } private int bitField0_; // required string effective_user = 1; public static final int EFFECTIVE_USER_FIELD_NUMBER = 1; private java.lang.Object effectiveUser_; /** * <code>required string effective_user = 1;</code> */ public boolean hasEffectiveUser() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string effective_user = 1;</code> */ public java.lang.String getEffectiveUser() { java.lang.Object ref = effectiveUser_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { effectiveUser_ = s; } return s; } } /** * <code>required string effective_user = 1;</code> */ public com.google.protobuf.ByteString getEffectiveUserBytes() { java.lang.Object ref = effectiveUser_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); effectiveUser_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } // optional string real_user = 2; public static final int REAL_USER_FIELD_NUMBER = 2; private java.lang.Object realUser_; /** * <code>optional string real_user = 2;</code> */ public boolean hasRealUser() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional string real_user = 2;</code> */ public java.lang.String getRealUser() { java.lang.Object ref = realUser_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { realUser_ = s; } return s; } } /** * <code>optional string real_user = 2;</code> */ public com.google.protobuf.ByteString getRealUserBytes() { java.lang.Object ref = realUser_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); realUser_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private void initFields() { effectiveUser_ = ""; realUser_ = ""; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasEffectiveUser()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, getEffectiveUserBytes()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, getRealUserBytes()); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = 
memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, getEffectiveUserBytes()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, getRealUserBytes()); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation) obj; boolean result = true; result = result && (hasEffectiveUser() == other.hasEffectiveUser()); if (hasEffectiveUser()) { result = result && getEffectiveUser() .equals(other.getEffectiveUser()); } result = result && (hasRealUser() == other.hasRealUser()); if (hasRealUser()) { result = result && getRealUser() .equals(other.getRealUser()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasEffectiveUser()) { hash = (37 * hash) + EFFECTIVE_USER_FIELD_NUMBER; hash = (53 * hash) + getEffectiveUser().hashCode(); } if (hasRealUser()) { hash = (37 * hash) + REAL_USER_FIELD_NUMBER; hash = (53 * hash) + getRealUser().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseDelimitedFrom(java.io.InputStream input) 
throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.UserInformation} * * <pre> * User Information proto. Included in ConnectionHeader on connection setup * </pre> */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_UserInformation_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_UserInformation_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); effectiveUser_ = ""; bitField0_ = (bitField0_ & ~0x00000001); realUser_ = ""; bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_UserInformation_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation build() { 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation buildPartial() { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.effectiveUser_ = effectiveUser_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.realUser_ = realUser_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation other) { if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance()) return this; if (other.hasEffectiveUser()) { bitField0_ |= 0x00000001; effectiveUser_ = other.effectiveUser_; onChanged(); } if (other.hasRealUser()) { bitField0_ |= 0x00000002; realUser_ = other.realUser_; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasEffectiveUser()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required string effective_user = 1; private java.lang.Object effectiveUser_ = ""; /** * <code>required string effective_user = 1;</code> */ public boolean hasEffectiveUser() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string effective_user = 1;</code> */ public java.lang.String getEffectiveUser() { java.lang.Object ref = effectiveUser_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); effectiveUser_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>required string effective_user = 1;</code> */ public com.google.protobuf.ByteString getEffectiveUserBytes() { java.lang.Object ref = effectiveUser_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); effectiveUser_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>required string effective_user = 1;</code> */ public Builder setEffectiveUser( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; effectiveUser_ = value; onChanged(); return this; } /** * <code>required string 
effective_user = 1;</code> */ public Builder clearEffectiveUser() { bitField0_ = (bitField0_ & ~0x00000001); effectiveUser_ = getDefaultInstance().getEffectiveUser(); onChanged(); return this; } /** * <code>required string effective_user = 1;</code> */ public Builder setEffectiveUserBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; effectiveUser_ = value; onChanged(); return this; } // optional string real_user = 2; private java.lang.Object realUser_ = ""; /** * <code>optional string real_user = 2;</code> */ public boolean hasRealUser() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional string real_user = 2;</code> */ public java.lang.String getRealUser() { java.lang.Object ref = realUser_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); realUser_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>optional string real_user = 2;</code> */ public com.google.protobuf.ByteString getRealUserBytes() { java.lang.Object ref = realUser_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); realUser_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>optional string real_user = 2;</code> */ public Builder setRealUser( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; realUser_ = value; onChanged(); return this; } /** * <code>optional string real_user = 2;</code> */ public Builder clearRealUser() { bitField0_ = (bitField0_ & ~0x00000002); realUser_ = getDefaultInstance().getRealUser(); onChanged(); return this; } /** * <code>optional string real_user = 2;</code> */ public Builder setRealUserBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; realUser_ = value; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:hbase.pb.UserInformation) } static { defaultInstance = new UserInformation(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.UserInformation) } public interface ConnectionHeaderOrBuilder extends com.google.protobuf.MessageOrBuilder { // optional .hbase.pb.UserInformation user_info = 1; /** * <code>optional .hbase.pb.UserInformation user_info = 1;</code> */ boolean hasUserInfo(); /** * <code>optional .hbase.pb.UserInformation user_info = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation getUserInfo(); /** * <code>optional .hbase.pb.UserInformation user_info = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder(); // optional string service_name = 2; /** * <code>optional string service_name = 2;</code> */ boolean hasServiceName(); /** * <code>optional string service_name = 2;</code> */ java.lang.String getServiceName(); /** * <code>optional string service_name = 2;</code> */ com.google.protobuf.ByteString getServiceNameBytes(); // optional string cell_block_codec_class = 3; /** * <code>optional string cell_block_codec_class = 3;</code> * * <pre> * Cell block codec we will use sending over optional cell blocks. Server throws exception * if cannot deal. Null means no codec'ing going on so we are pb all the time (SLOW!!!) 
* </pre> */ boolean hasCellBlockCodecClass(); /** * <code>optional string cell_block_codec_class = 3;</code> * * <pre> * Cell block codec we will use sending over optional cell blocks. Server throws exception * if cannot deal. Null means no codec'ing going on so we are pb all the time (SLOW!!!) * </pre> */ java.lang.String getCellBlockCodecClass(); /** * <code>optional string cell_block_codec_class = 3;</code> * * <pre> * Cell block codec we will use sending over optional cell blocks. Server throws exception * if cannot deal. Null means no codec'ing going on so we are pb all the time (SLOW!!!) * </pre> */ com.google.protobuf.ByteString getCellBlockCodecClassBytes(); // optional string cell_block_compressor_class = 4; /** * <code>optional string cell_block_compressor_class = 4;</code> * * <pre> * Compressor we will use if cell block is compressed. Server will throw exception if not supported. * Class must implement hadoop's CompressionCodec Interface. Can't compress if no codec. * </pre> */ boolean hasCellBlockCompressorClass(); /** * <code>optional string cell_block_compressor_class = 4;</code> * * <pre> * Compressor we will use if cell block is compressed. Server will throw exception if not supported. * Class must implement hadoop's CompressionCodec Interface. Can't compress if no codec. * </pre> */ java.lang.String getCellBlockCompressorClass(); /** * <code>optional string cell_block_compressor_class = 4;</code> * * <pre> * Compressor we will use if cell block is compressed. Server will throw exception if not supported. * Class must implement hadoop's CompressionCodec Interface. Can't compress if no codec. * </pre> */ com.google.protobuf.ByteString getCellBlockCompressorClassBytes(); // optional .hbase.pb.VersionInfo version_info = 5; /** * <code>optional .hbase.pb.VersionInfo version_info = 5;</code> */ boolean hasVersionInfo(); /** * <code>optional .hbase.pb.VersionInfo version_info = 5;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo getVersionInfo(); /** * <code>optional .hbase.pb.VersionInfo version_info = 5;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfoOrBuilder getVersionInfoOrBuilder(); } /** * Protobuf type {@code hbase.pb.ConnectionHeader} * * <pre> * This is sent on connection setup after the connection preamble is sent. * </pre> */ public static final class ConnectionHeader extends com.google.protobuf.GeneratedMessage implements ConnectionHeaderOrBuilder { // Use ConnectionHeader.newBuilder() to construct. 
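    /*
     * Illustrative sketch only, not generated by protoc: roughly how a client
     * could populate ConnectionHeader (with its nested UserInformation) using
     * the builders defined in this file, then round-trip it through the
     * generated parser. The literal values ("alice", "bob", "ClientService",
     * the codec class name) are placeholder examples, not constants defined here.
     *
     *   UserInformation user = UserInformation.newBuilder()
     *       .setEffectiveUser("alice")   // required; build() fails without it
     *       .setRealUser("bob")          // optional real/proxy user
     *       .build();
     *   ConnectionHeader header = ConnectionHeader.newBuilder()
     *       .setUserInfo(user)
     *       .setServiceName("ClientService")
     *       .setCellBlockCodecClass("org.apache.hadoop.hbase.codec.KeyValueCodec")
     *       .build();
     *   ConnectionHeader copy = ConnectionHeader.parseFrom(header.toByteString());
     */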
private ConnectionHeader(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private ConnectionHeader(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final ConnectionHeader defaultInstance; public static ConnectionHeader getDefaultInstance() { return defaultInstance; } public ConnectionHeader getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ConnectionHeader( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = userInfo_.toBuilder(); } userInfo_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(userInfo_); userInfo_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 18: { bitField0_ |= 0x00000002; serviceName_ = input.readBytes(); break; } case 26: { bitField0_ |= 0x00000004; cellBlockCodecClass_ = input.readBytes(); break; } case 34: { bitField0_ |= 0x00000008; cellBlockCompressorClass_ = input.readBytes(); break; } case 42: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo.Builder subBuilder = null; if (((bitField0_ & 0x00000010) == 0x00000010)) { subBuilder = versionInfo_.toBuilder(); } versionInfo_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(versionInfo_); versionInfo_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000010; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ConnectionHeader_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ConnectionHeader_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader.Builder.class); } public static com.google.protobuf.Parser<ConnectionHeader> PARSER = new com.google.protobuf.AbstractParser<ConnectionHeader>() { public ConnectionHeader parsePartialFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ConnectionHeader(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<ConnectionHeader> getParserForType() { return PARSER; } private int bitField0_; // optional .hbase.pb.UserInformation user_info = 1; public static final int USER_INFO_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation userInfo_; /** * <code>optional .hbase.pb.UserInformation user_info = 1;</code> */ public boolean hasUserInfo() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional .hbase.pb.UserInformation user_info = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation getUserInfo() { return userInfo_; } /** * <code>optional .hbase.pb.UserInformation user_info = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder() { return userInfo_; } // optional string service_name = 2; public static final int SERVICE_NAME_FIELD_NUMBER = 2; private java.lang.Object serviceName_; /** * <code>optional string service_name = 2;</code> */ public boolean hasServiceName() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional string service_name = 2;</code> */ public java.lang.String getServiceName() { java.lang.Object ref = serviceName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { serviceName_ = s; } return s; } } /** * <code>optional string service_name = 2;</code> */ public com.google.protobuf.ByteString getServiceNameBytes() { java.lang.Object ref = serviceName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); serviceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } // optional string cell_block_codec_class = 3; public static final int CELL_BLOCK_CODEC_CLASS_FIELD_NUMBER = 3; private java.lang.Object cellBlockCodecClass_; /** * <code>optional string cell_block_codec_class = 3;</code> * * <pre> * Cell block codec we will use sending over optional cell blocks. Server throws exception * if cannot deal. Null means no codec'ing going on so we are pb all the time (SLOW!!!) * </pre> */ public boolean hasCellBlockCodecClass() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional string cell_block_codec_class = 3;</code> * * <pre> * Cell block codec we will use sending over optional cell blocks. Server throws exception * if cannot deal. Null means no codec'ing going on so we are pb all the time (SLOW!!!) * </pre> */ public java.lang.String getCellBlockCodecClass() { java.lang.Object ref = cellBlockCodecClass_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { cellBlockCodecClass_ = s; } return s; } } /** * <code>optional string cell_block_codec_class = 3;</code> * * <pre> * Cell block codec we will use sending over optional cell blocks. Server throws exception * if cannot deal. Null means no codec'ing going on so we are pb all the time (SLOW!!!) 
* </pre> */ public com.google.protobuf.ByteString getCellBlockCodecClassBytes() { java.lang.Object ref = cellBlockCodecClass_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); cellBlockCodecClass_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } // optional string cell_block_compressor_class = 4; public static final int CELL_BLOCK_COMPRESSOR_CLASS_FIELD_NUMBER = 4; private java.lang.Object cellBlockCompressorClass_; /** * <code>optional string cell_block_compressor_class = 4;</code> * * <pre> * Compressor we will use if cell block is compressed. Server will throw exception if not supported. * Class must implement hadoop's CompressionCodec Interface. Can't compress if no codec. * </pre> */ public boolean hasCellBlockCompressorClass() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional string cell_block_compressor_class = 4;</code> * * <pre> * Compressor we will use if cell block is compressed. Server will throw exception if not supported. * Class must implement hadoop's CompressionCodec Interface. Can't compress if no codec. * </pre> */ public java.lang.String getCellBlockCompressorClass() { java.lang.Object ref = cellBlockCompressorClass_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { cellBlockCompressorClass_ = s; } return s; } } /** * <code>optional string cell_block_compressor_class = 4;</code> * * <pre> * Compressor we will use if cell block is compressed. Server will throw exception if not supported. * Class must implement hadoop's CompressionCodec Interface. Can't compress if no codec. 
* </pre> */ public com.google.protobuf.ByteString getCellBlockCompressorClassBytes() { java.lang.Object ref = cellBlockCompressorClass_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); cellBlockCompressorClass_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } // optional .hbase.pb.VersionInfo version_info = 5; public static final int VERSION_INFO_FIELD_NUMBER = 5; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo versionInfo_; /** * <code>optional .hbase.pb.VersionInfo version_info = 5;</code> */ public boolean hasVersionInfo() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>optional .hbase.pb.VersionInfo version_info = 5;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo getVersionInfo() { return versionInfo_; } /** * <code>optional .hbase.pb.VersionInfo version_info = 5;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfoOrBuilder getVersionInfoOrBuilder() { return versionInfo_; } private void initFields() { userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); serviceName_ = ""; cellBlockCodecClass_ = ""; cellBlockCompressorClass_ = ""; versionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (hasUserInfo()) { if (!getUserInfo().isInitialized()) { memoizedIsInitialized = 0; return false; } } if (hasVersionInfo()) { if (!getVersionInfo().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, userInfo_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, getServiceNameBytes()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBytes(3, getCellBlockCodecClassBytes()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeBytes(4, getCellBlockCompressorClassBytes()); } if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeMessage(5, versionInfo_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, userInfo_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, getServiceNameBytes()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(3, getCellBlockCodecClassBytes()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(4, getCellBlockCompressorClassBytes()); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(5, versionInfo_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected 
java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader) obj; boolean result = true; result = result && (hasUserInfo() == other.hasUserInfo()); if (hasUserInfo()) { result = result && getUserInfo() .equals(other.getUserInfo()); } result = result && (hasServiceName() == other.hasServiceName()); if (hasServiceName()) { result = result && getServiceName() .equals(other.getServiceName()); } result = result && (hasCellBlockCodecClass() == other.hasCellBlockCodecClass()); if (hasCellBlockCodecClass()) { result = result && getCellBlockCodecClass() .equals(other.getCellBlockCodecClass()); } result = result && (hasCellBlockCompressorClass() == other.hasCellBlockCompressorClass()); if (hasCellBlockCompressorClass()) { result = result && getCellBlockCompressorClass() .equals(other.getCellBlockCompressorClass()); } result = result && (hasVersionInfo() == other.hasVersionInfo()); if (hasVersionInfo()) { result = result && getVersionInfo() .equals(other.getVersionInfo()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasUserInfo()) { hash = (37 * hash) + USER_INFO_FIELD_NUMBER; hash = (53 * hash) + getUserInfo().hashCode(); } if (hasServiceName()) { hash = (37 * hash) + SERVICE_NAME_FIELD_NUMBER; hash = (53 * hash) + getServiceName().hashCode(); } if (hasCellBlockCodecClass()) { hash = (37 * hash) + CELL_BLOCK_CODEC_CLASS_FIELD_NUMBER; hash = (53 * hash) + getCellBlockCodecClass().hashCode(); } if (hasCellBlockCompressorClass()) { hash = (37 * hash) + CELL_BLOCK_COMPRESSOR_CLASS_FIELD_NUMBER; hash = (53 * hash) + getCellBlockCompressorClass().hashCode(); } if (hasVersionInfo()) { hash = (37 * hash) + VERSION_INFO_FIELD_NUMBER; hash = (53 * hash) + getVersionInfo().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.ConnectionHeader} * * <pre> * This is sent on connection setup after the connection preamble is sent. 
* </pre> */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ConnectionHeader_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ConnectionHeader_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getUserInfoFieldBuilder(); getVersionInfoFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (userInfoBuilder_ == null) { userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); } else { userInfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); serviceName_ = ""; bitField0_ = (bitField0_ & ~0x00000002); cellBlockCodecClass_ = ""; bitField0_ = (bitField0_ & ~0x00000004); cellBlockCompressorClass_ = ""; bitField0_ = (bitField0_ & ~0x00000008); if (versionInfoBuilder_ == null) { versionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance(); } else { versionInfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000010); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ConnectionHeader_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader build() { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader buildPartial() { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (userInfoBuilder_ == null) { result.userInfo_ = userInfo_; } else { result.userInfo_ = userInfoBuilder_.build(); } if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.serviceName_ = serviceName_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.cellBlockCodecClass_ = cellBlockCodecClass_; if 
(((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; } result.cellBlockCompressorClass_ = cellBlockCompressorClass_; if (((from_bitField0_ & 0x00000010) == 0x00000010)) { to_bitField0_ |= 0x00000010; } if (versionInfoBuilder_ == null) { result.versionInfo_ = versionInfo_; } else { result.versionInfo_ = versionInfoBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader other) { if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader.getDefaultInstance()) return this; if (other.hasUserInfo()) { mergeUserInfo(other.getUserInfo()); } if (other.hasServiceName()) { bitField0_ |= 0x00000002; serviceName_ = other.serviceName_; onChanged(); } if (other.hasCellBlockCodecClass()) { bitField0_ |= 0x00000004; cellBlockCodecClass_ = other.cellBlockCodecClass_; onChanged(); } if (other.hasCellBlockCompressorClass()) { bitField0_ |= 0x00000008; cellBlockCompressorClass_ = other.cellBlockCompressorClass_; onChanged(); } if (other.hasVersionInfo()) { mergeVersionInfo(other.getVersionInfo()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (hasUserInfo()) { if (!getUserInfo().isInitialized()) { return false; } } if (hasVersionInfo()) { if (!getVersionInfo().isInitialized()) { return false; } } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // optional .hbase.pb.UserInformation user_info = 1; private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder> userInfoBuilder_; /** * <code>optional .hbase.pb.UserInformation user_info = 1;</code> */ public boolean hasUserInfo() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional .hbase.pb.UserInformation user_info = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation getUserInfo() { if (userInfoBuilder_ == null) { return userInfo_; } else { return userInfoBuilder_.getMessage(); } } /** * <code>optional .hbase.pb.UserInformation user_info = 1;</code> */ public Builder setUserInfo(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation value) { if (userInfoBuilder_ == null) { if (value == null) { throw 
new NullPointerException(); } userInfo_ = value; onChanged(); } else { userInfoBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>optional .hbase.pb.UserInformation user_info = 1;</code> */ public Builder setUserInfo( org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder builderForValue) { if (userInfoBuilder_ == null) { userInfo_ = builderForValue.build(); onChanged(); } else { userInfoBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>optional .hbase.pb.UserInformation user_info = 1;</code> */ public Builder mergeUserInfo(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation value) { if (userInfoBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && userInfo_ != org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance()) { userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.newBuilder(userInfo_).mergeFrom(value).buildPartial(); } else { userInfo_ = value; } onChanged(); } else { userInfoBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>optional .hbase.pb.UserInformation user_info = 1;</code> */ public Builder clearUserInfo() { if (userInfoBuilder_ == null) { userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); onChanged(); } else { userInfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>optional .hbase.pb.UserInformation user_info = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder getUserInfoBuilder() { bitField0_ |= 0x00000001; onChanged(); return getUserInfoFieldBuilder().getBuilder(); } /** * <code>optional .hbase.pb.UserInformation user_info = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder() { if (userInfoBuilder_ != null) { return userInfoBuilder_.getMessageOrBuilder(); } else { return userInfo_; } } /** * <code>optional .hbase.pb.UserInformation user_info = 1;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder> getUserInfoFieldBuilder() { if (userInfoBuilder_ == null) { userInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder>( userInfo_, getParentForChildren(), isClean()); userInfo_ = null; } return userInfoBuilder_; } // optional string service_name = 2; private java.lang.Object serviceName_ = ""; /** * <code>optional string service_name = 2;</code> */ public boolean hasServiceName() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional string service_name = 2;</code> */ public java.lang.String getServiceName() { java.lang.Object ref = serviceName_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); serviceName_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>optional string service_name = 2;</code> */ public com.google.protobuf.ByteString getServiceNameBytes() { java.lang.Object ref = 
serviceName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); serviceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>optional string service_name = 2;</code> */ public Builder setServiceName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; serviceName_ = value; onChanged(); return this; } /** * <code>optional string service_name = 2;</code> */ public Builder clearServiceName() { bitField0_ = (bitField0_ & ~0x00000002); serviceName_ = getDefaultInstance().getServiceName(); onChanged(); return this; } /** * <code>optional string service_name = 2;</code> */ public Builder setServiceNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; serviceName_ = value; onChanged(); return this; } // optional string cell_block_codec_class = 3; private java.lang.Object cellBlockCodecClass_ = ""; /** * <code>optional string cell_block_codec_class = 3;</code> * * <pre> * Cell block codec we will use sending over optional cell blocks. Server throws exception * if cannot deal. Null means no codec'ing going on so we are pb all the time (SLOW!!!) * </pre> */ public boolean hasCellBlockCodecClass() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional string cell_block_codec_class = 3;</code> * * <pre> * Cell block codec we will use sending over optional cell blocks. Server throws exception * if cannot deal. Null means no codec'ing going on so we are pb all the time (SLOW!!!) * </pre> */ public java.lang.String getCellBlockCodecClass() { java.lang.Object ref = cellBlockCodecClass_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); cellBlockCodecClass_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>optional string cell_block_codec_class = 3;</code> * * <pre> * Cell block codec we will use sending over optional cell blocks. Server throws exception * if cannot deal. Null means no codec'ing going on so we are pb all the time (SLOW!!!) * </pre> */ public com.google.protobuf.ByteString getCellBlockCodecClassBytes() { java.lang.Object ref = cellBlockCodecClass_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); cellBlockCodecClass_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>optional string cell_block_codec_class = 3;</code> * * <pre> * Cell block codec we will use sending over optional cell blocks. Server throws exception * if cannot deal. Null means no codec'ing going on so we are pb all the time (SLOW!!!) * </pre> */ public Builder setCellBlockCodecClass( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; cellBlockCodecClass_ = value; onChanged(); return this; } /** * <code>optional string cell_block_codec_class = 3;</code> * * <pre> * Cell block codec we will use sending over optional cell blocks. Server throws exception * if cannot deal. Null means no codec'ing going on so we are pb all the time (SLOW!!!) 
* </pre> */ public Builder clearCellBlockCodecClass() { bitField0_ = (bitField0_ & ~0x00000004); cellBlockCodecClass_ = getDefaultInstance().getCellBlockCodecClass(); onChanged(); return this; } /** * <code>optional string cell_block_codec_class = 3;</code> * * <pre> * Cell block codec we will use sending over optional cell blocks. Server throws exception * if cannot deal. Null means no codec'ing going on so we are pb all the time (SLOW!!!) * </pre> */ public Builder setCellBlockCodecClassBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; cellBlockCodecClass_ = value; onChanged(); return this; } // optional string cell_block_compressor_class = 4; private java.lang.Object cellBlockCompressorClass_ = ""; /** * <code>optional string cell_block_compressor_class = 4;</code> * * <pre> * Compressor we will use if cell block is compressed. Server will throw exception if not supported. * Class must implement hadoop's CompressionCodec Interface. Can't compress if no codec. * </pre> */ public boolean hasCellBlockCompressorClass() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional string cell_block_compressor_class = 4;</code> * * <pre> * Compressor we will use if cell block is compressed. Server will throw exception if not supported. * Class must implement hadoop's CompressionCodec Interface. Can't compress if no codec. * </pre> */ public java.lang.String getCellBlockCompressorClass() { java.lang.Object ref = cellBlockCompressorClass_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); cellBlockCompressorClass_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>optional string cell_block_compressor_class = 4;</code> * * <pre> * Compressor we will use if cell block is compressed. Server will throw exception if not supported. * Class must implement hadoop's CompressionCodec Interface. Can't compress if no codec. * </pre> */ public com.google.protobuf.ByteString getCellBlockCompressorClassBytes() { java.lang.Object ref = cellBlockCompressorClass_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); cellBlockCompressorClass_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>optional string cell_block_compressor_class = 4;</code> * * <pre> * Compressor we will use if cell block is compressed. Server will throw exception if not supported. * Class must implement hadoop's CompressionCodec Interface. Can't compress if no codec. * </pre> */ public Builder setCellBlockCompressorClass( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000008; cellBlockCompressorClass_ = value; onChanged(); return this; } /** * <code>optional string cell_block_compressor_class = 4;</code> * * <pre> * Compressor we will use if cell block is compressed. Server will throw exception if not supported. * Class must implement hadoop's CompressionCodec Interface. Can't compress if no codec. * </pre> */ public Builder clearCellBlockCompressorClass() { bitField0_ = (bitField0_ & ~0x00000008); cellBlockCompressorClass_ = getDefaultInstance().getCellBlockCompressorClass(); onChanged(); return this; } /** * <code>optional string cell_block_compressor_class = 4;</code> * * <pre> * Compressor we will use if cell block is compressed. Server will throw exception if not supported. 
* Class must implement hadoop's CompressionCodec Interface. Can't compress if no codec. * </pre> */ public Builder setCellBlockCompressorClassBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000008; cellBlockCompressorClass_ = value; onChanged(); return this; } // optional .hbase.pb.VersionInfo version_info = 5; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo versionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfoOrBuilder> versionInfoBuilder_; /** * <code>optional .hbase.pb.VersionInfo version_info = 5;</code> */ public boolean hasVersionInfo() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>optional .hbase.pb.VersionInfo version_info = 5;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo getVersionInfo() { if (versionInfoBuilder_ == null) { return versionInfo_; } else { return versionInfoBuilder_.getMessage(); } } /** * <code>optional .hbase.pb.VersionInfo version_info = 5;</code> */ public Builder setVersionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo value) { if (versionInfoBuilder_ == null) { if (value == null) { throw new NullPointerException(); } versionInfo_ = value; onChanged(); } else { versionInfoBuilder_.setMessage(value); } bitField0_ |= 0x00000010; return this; } /** * <code>optional .hbase.pb.VersionInfo version_info = 5;</code> */ public Builder setVersionInfo( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo.Builder builderForValue) { if (versionInfoBuilder_ == null) { versionInfo_ = builderForValue.build(); onChanged(); } else { versionInfoBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000010; return this; } /** * <code>optional .hbase.pb.VersionInfo version_info = 5;</code> */ public Builder mergeVersionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo value) { if (versionInfoBuilder_ == null) { if (((bitField0_ & 0x00000010) == 0x00000010) && versionInfo_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance()) { versionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo.newBuilder(versionInfo_).mergeFrom(value).buildPartial(); } else { versionInfo_ = value; } onChanged(); } else { versionInfoBuilder_.mergeFrom(value); } bitField0_ |= 0x00000010; return this; } /** * <code>optional .hbase.pb.VersionInfo version_info = 5;</code> */ public Builder clearVersionInfo() { if (versionInfoBuilder_ == null) { versionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance(); onChanged(); } else { versionInfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000010); return this; } /** * <code>optional .hbase.pb.VersionInfo version_info = 5;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo.Builder getVersionInfoBuilder() { bitField0_ |= 0x00000010; onChanged(); return getVersionInfoFieldBuilder().getBuilder(); } /** * <code>optional .hbase.pb.VersionInfo version_info = 5;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfoOrBuilder getVersionInfoOrBuilder() { if (versionInfoBuilder_ != 
null) { return versionInfoBuilder_.getMessageOrBuilder(); } else { return versionInfo_; } } /** * <code>optional .hbase.pb.VersionInfo version_info = 5;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfoOrBuilder> getVersionInfoFieldBuilder() { if (versionInfoBuilder_ == null) { versionInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfoOrBuilder>( versionInfo_, getParentForChildren(), isClean()); versionInfo_ = null; } return versionInfoBuilder_; } // @@protoc_insertion_point(builder_scope:hbase.pb.ConnectionHeader) } static { defaultInstance = new ConnectionHeader(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.ConnectionHeader) } public interface CellBlockMetaOrBuilder extends com.google.protobuf.MessageOrBuilder { // optional uint32 length = 1; /** * <code>optional uint32 length = 1;</code> * * <pre> * Length of the following cell block. Could calculate it but convenient having it too hand. * </pre> */ boolean hasLength(); /** * <code>optional uint32 length = 1;</code> * * <pre> * Length of the following cell block. Could calculate it but convenient having it too hand. * </pre> */ int getLength(); } /** * Protobuf type {@code hbase.pb.CellBlockMeta} * * <pre> * Optional Cell block Message. Included in client RequestHeader * </pre> */ public static final class CellBlockMeta extends com.google.protobuf.GeneratedMessage implements CellBlockMetaOrBuilder { // Use CellBlockMeta.newBuilder() to construct. 
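    /*
     * Illustrative usage sketch (the 4096 value is an example only); it relies solely on the
     * builder and parse methods generated in this file:
     *
     *   CellBlockMeta meta = CellBlockMeta.newBuilder()
     *       .setLength(4096)            // length of the cell block that follows on the wire
     *       .build();
     *   byte[] wire = meta.toByteArray();
     *   CellBlockMeta parsed = CellBlockMeta.parseFrom(wire);
     *   assert parsed.hasLength() && parsed.getLength() == 4096;
     */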
private CellBlockMeta(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private CellBlockMeta(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final CellBlockMeta defaultInstance; public static CellBlockMeta getDefaultInstance() { return defaultInstance; } public CellBlockMeta getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private CellBlockMeta( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { bitField0_ |= 0x00000001; length_ = input.readUInt32(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_CellBlockMeta_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_CellBlockMeta_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.Builder.class); } public static com.google.protobuf.Parser<CellBlockMeta> PARSER = new com.google.protobuf.AbstractParser<CellBlockMeta>() { public CellBlockMeta parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new CellBlockMeta(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<CellBlockMeta> getParserForType() { return PARSER; } private int bitField0_; // optional uint32 length = 1; public static final int LENGTH_FIELD_NUMBER = 1; private int length_; /** * <code>optional uint32 length = 1;</code> * * <pre> * Length of the following cell block. Could calculate it but convenient having it too hand. * </pre> */ public boolean hasLength() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional uint32 length = 1;</code> * * <pre> * Length of the following cell block. Could calculate it but convenient having it too hand. 
* </pre> */ public int getLength() { return length_; } private void initFields() { length_ = 0; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt32(1, length_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeUInt32Size(1, length_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta) obj; boolean result = true; result = result && (hasLength() == other.hasLength()); if (hasLength()) { result = result && (getLength() == other.getLength()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasLength()) { hash = (37 * hash) + LENGTH_FIELD_NUMBER; hash = (53 * hash) + getLength(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, 
extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.CellBlockMeta} * * <pre> * Optional Cell block Message. Included in client RequestHeader * </pre> */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_CellBlockMeta_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_CellBlockMeta_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); length_ = 0; bitField0_ = (bitField0_ & ~0x00000001); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_CellBlockMeta_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta build() { 
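      // build() delegates to buildPartial() and then checks isInitialized(); CellBlockMeta
      // declares no required fields, so the uninitialized path is never taken here.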
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta buildPartial() { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.length_ = length_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta other) { if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance()) return this; if (other.hasLength()) { setLength(other.getLength()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // optional uint32 length = 1; private int length_ ; /** * <code>optional uint32 length = 1;</code> * * <pre> * Length of the following cell block. Could calculate it but convenient having it too hand. * </pre> */ public boolean hasLength() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional uint32 length = 1;</code> * * <pre> * Length of the following cell block. Could calculate it but convenient having it too hand. * </pre> */ public int getLength() { return length_; } /** * <code>optional uint32 length = 1;</code> * * <pre> * Length of the following cell block. Could calculate it but convenient having it too hand. * </pre> */ public Builder setLength(int value) { bitField0_ |= 0x00000001; length_ = value; onChanged(); return this; } /** * <code>optional uint32 length = 1;</code> * * <pre> * Length of the following cell block. Could calculate it but convenient having it too hand. 
* </pre> */ public Builder clearLength() { bitField0_ = (bitField0_ & ~0x00000001); length_ = 0; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:hbase.pb.CellBlockMeta) } static { defaultInstance = new CellBlockMeta(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.CellBlockMeta) } public interface ExceptionResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { // optional string exception_class_name = 1; /** * <code>optional string exception_class_name = 1;</code> * * <pre> * Class name of the exception thrown from the server * </pre> */ boolean hasExceptionClassName(); /** * <code>optional string exception_class_name = 1;</code> * * <pre> * Class name of the exception thrown from the server * </pre> */ java.lang.String getExceptionClassName(); /** * <code>optional string exception_class_name = 1;</code> * * <pre> * Class name of the exception thrown from the server * </pre> */ com.google.protobuf.ByteString getExceptionClassNameBytes(); // optional string stack_trace = 2; /** * <code>optional string stack_trace = 2;</code> * * <pre> * Exception stack trace from the server side * </pre> */ boolean hasStackTrace(); /** * <code>optional string stack_trace = 2;</code> * * <pre> * Exception stack trace from the server side * </pre> */ java.lang.String getStackTrace(); /** * <code>optional string stack_trace = 2;</code> * * <pre> * Exception stack trace from the server side * </pre> */ com.google.protobuf.ByteString getStackTraceBytes(); // optional string hostname = 3; /** * <code>optional string hostname = 3;</code> * * <pre> * Optional hostname. Filled in for some exceptions such as region moved * where exception gives clue on where the region may have moved. * </pre> */ boolean hasHostname(); /** * <code>optional string hostname = 3;</code> * * <pre> * Optional hostname. Filled in for some exceptions such as region moved * where exception gives clue on where the region may have moved. * </pre> */ java.lang.String getHostname(); /** * <code>optional string hostname = 3;</code> * * <pre> * Optional hostname. Filled in for some exceptions such as region moved * where exception gives clue on where the region may have moved. * </pre> */ com.google.protobuf.ByteString getHostnameBytes(); // optional int32 port = 4; /** * <code>optional int32 port = 4;</code> */ boolean hasPort(); /** * <code>optional int32 port = 4;</code> */ int getPort(); // optional bool do_not_retry = 5; /** * <code>optional bool do_not_retry = 5;</code> * * <pre> * Set if we are NOT to retry on receipt of this exception * </pre> */ boolean hasDoNotRetry(); /** * <code>optional bool do_not_retry = 5;</code> * * <pre> * Set if we are NOT to retry on receipt of this exception * </pre> */ boolean getDoNotRetry(); } /** * Protobuf type {@code hbase.pb.ExceptionResponse} * * <pre> * At the RPC layer, this message is used to carry * the server side exception to the RPC client. * </pre> */ public static final class ExceptionResponse extends com.google.protobuf.GeneratedMessage implements ExceptionResponseOrBuilder { // Use ExceptionResponse.newBuilder() to construct. 
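    /*
     * Illustrative sketch of populating and inspecting this message using only the
     * generated builder/accessor methods below (field values are hypothetical):
     *
     *   ExceptionResponse resp = ExceptionResponse.newBuilder()
     *       .setExceptionClassName("org.apache.hadoop.hbase.NotServingRegionException")
     *       .setStackTrace(stackTraceAsString)   // e.g. captured via StringWriter/PrintWriter
     *       .setDoNotRetry(false)                // the client is allowed to retry this call
     *       .build();
     *
     *   // Client side: honour do_not_retry before scheduling another attempt.
     *   if (resp.hasDoNotRetry() && resp.getDoNotRetry()) {
     *     // fail the call without retrying
     *   }
     */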
private ExceptionResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private ExceptionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final ExceptionResponse defaultInstance; public static ExceptionResponse getDefaultInstance() { return defaultInstance; } public ExceptionResponse getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ExceptionResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; exceptionClassName_ = input.readBytes(); break; } case 18: { bitField0_ |= 0x00000002; stackTrace_ = input.readBytes(); break; } case 26: { bitField0_ |= 0x00000004; hostname_ = input.readBytes(); break; } case 32: { bitField0_ |= 0x00000008; port_ = input.readInt32(); break; } case 40: { bitField0_ |= 0x00000010; doNotRetry_ = input.readBool(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ExceptionResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ExceptionResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.Builder.class); } public static com.google.protobuf.Parser<ExceptionResponse> PARSER = new com.google.protobuf.AbstractParser<ExceptionResponse>() { public ExceptionResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ExceptionResponse(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<ExceptionResponse> getParserForType() { return PARSER; } private int bitField0_; // optional string exception_class_name = 1; public static final int EXCEPTION_CLASS_NAME_FIELD_NUMBER = 1; private java.lang.Object exceptionClassName_; /** * <code>optional string exception_class_name = 1;</code> * * <pre> * Class name of the exception thrown from the server * </pre> */ public boolean hasExceptionClassName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional string 
exception_class_name = 1;</code> * * <pre> * Class name of the exception thrown from the server * </pre> */ public java.lang.String getExceptionClassName() { java.lang.Object ref = exceptionClassName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { exceptionClassName_ = s; } return s; } } /** * <code>optional string exception_class_name = 1;</code> * * <pre> * Class name of the exception thrown from the server * </pre> */ public com.google.protobuf.ByteString getExceptionClassNameBytes() { java.lang.Object ref = exceptionClassName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); exceptionClassName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } // optional string stack_trace = 2; public static final int STACK_TRACE_FIELD_NUMBER = 2; private java.lang.Object stackTrace_; /** * <code>optional string stack_trace = 2;</code> * * <pre> * Exception stack trace from the server side * </pre> */ public boolean hasStackTrace() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional string stack_trace = 2;</code> * * <pre> * Exception stack trace from the server side * </pre> */ public java.lang.String getStackTrace() { java.lang.Object ref = stackTrace_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { stackTrace_ = s; } return s; } } /** * <code>optional string stack_trace = 2;</code> * * <pre> * Exception stack trace from the server side * </pre> */ public com.google.protobuf.ByteString getStackTraceBytes() { java.lang.Object ref = stackTrace_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); stackTrace_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } // optional string hostname = 3; public static final int HOSTNAME_FIELD_NUMBER = 3; private java.lang.Object hostname_; /** * <code>optional string hostname = 3;</code> * * <pre> * Optional hostname. Filled in for some exceptions such as region moved * where exception gives clue on where the region may have moved. * </pre> */ public boolean hasHostname() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional string hostname = 3;</code> * * <pre> * Optional hostname. Filled in for some exceptions such as region moved * where exception gives clue on where the region may have moved. * </pre> */ public java.lang.String getHostname() { java.lang.Object ref = hostname_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { hostname_ = s; } return s; } } /** * <code>optional string hostname = 3;</code> * * <pre> * Optional hostname. Filled in for some exceptions such as region moved * where exception gives clue on where the region may have moved. 
* </pre> */ public com.google.protobuf.ByteString getHostnameBytes() { java.lang.Object ref = hostname_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); hostname_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } // optional int32 port = 4; public static final int PORT_FIELD_NUMBER = 4; private int port_; /** * <code>optional int32 port = 4;</code> */ public boolean hasPort() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional int32 port = 4;</code> */ public int getPort() { return port_; } // optional bool do_not_retry = 5; public static final int DO_NOT_RETRY_FIELD_NUMBER = 5; private boolean doNotRetry_; /** * <code>optional bool do_not_retry = 5;</code> * * <pre> * Set if we are NOT to retry on receipt of this exception * </pre> */ public boolean hasDoNotRetry() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>optional bool do_not_retry = 5;</code> * * <pre> * Set if we are NOT to retry on receipt of this exception * </pre> */ public boolean getDoNotRetry() { return doNotRetry_; } private void initFields() { exceptionClassName_ = ""; stackTrace_ = ""; hostname_ = ""; port_ = 0; doNotRetry_ = false; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, getExceptionClassNameBytes()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, getStackTraceBytes()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBytes(3, getHostnameBytes()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeInt32(4, port_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeBool(5, doNotRetry_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, getExceptionClassNameBytes()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, getStackTraceBytes()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(3, getHostnameBytes()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += com.google.protobuf.CodedOutputStream .computeInt32Size(4, port_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(5, doNotRetry_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse other = 
(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse) obj; boolean result = true; result = result && (hasExceptionClassName() == other.hasExceptionClassName()); if (hasExceptionClassName()) { result = result && getExceptionClassName() .equals(other.getExceptionClassName()); } result = result && (hasStackTrace() == other.hasStackTrace()); if (hasStackTrace()) { result = result && getStackTrace() .equals(other.getStackTrace()); } result = result && (hasHostname() == other.hasHostname()); if (hasHostname()) { result = result && getHostname() .equals(other.getHostname()); } result = result && (hasPort() == other.hasPort()); if (hasPort()) { result = result && (getPort() == other.getPort()); } result = result && (hasDoNotRetry() == other.hasDoNotRetry()); if (hasDoNotRetry()) { result = result && (getDoNotRetry() == other.getDoNotRetry()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasExceptionClassName()) { hash = (37 * hash) + EXCEPTION_CLASS_NAME_FIELD_NUMBER; hash = (53 * hash) + getExceptionClassName().hashCode(); } if (hasStackTrace()) { hash = (37 * hash) + STACK_TRACE_FIELD_NUMBER; hash = (53 * hash) + getStackTrace().hashCode(); } if (hasHostname()) { hash = (37 * hash) + HOSTNAME_FIELD_NUMBER; hash = (53 * hash) + getHostname().hashCode(); } if (hasPort()) { hash = (37 * hash) + PORT_FIELD_NUMBER; hash = (53 * hash) + getPort(); } if (hasDoNotRetry()) { hash = (37 * hash) + DO_NOT_RETRY_FIELD_NUMBER; hash = (53 * hash) + hashBoolean(getDoNotRetry()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.ExceptionResponse} * * <pre> * At the RPC layer, this message is used to carry * the server side exception to the RPC client. * </pre> */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ExceptionResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ExceptionResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); exceptionClassName_ = ""; bitField0_ = (bitField0_ & ~0x00000001); stackTrace_ = ""; bitField0_ = (bitField0_ & ~0x00000002); hostname_ = ""; bitField0_ = (bitField0_ & ~0x00000004); port_ = 0; bitField0_ = (bitField0_ & ~0x00000008); doNotRetry_ = false; bitField0_ = (bitField0_ & ~0x00000010); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ExceptionResponse_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance(); } public 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse build() { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.exceptionClassName_ = exceptionClassName_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.stackTrace_ = stackTrace_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.hostname_ = hostname_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; } result.port_ = port_; if (((from_bitField0_ & 0x00000010) == 0x00000010)) { to_bitField0_ |= 0x00000010; } result.doNotRetry_ = doNotRetry_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance()) return this; if (other.hasExceptionClassName()) { bitField0_ |= 0x00000001; exceptionClassName_ = other.exceptionClassName_; onChanged(); } if (other.hasStackTrace()) { bitField0_ |= 0x00000002; stackTrace_ = other.stackTrace_; onChanged(); } if (other.hasHostname()) { bitField0_ |= 0x00000004; hostname_ = other.hostname_; onChanged(); } if (other.hasPort()) { setPort(other.getPort()); } if (other.hasDoNotRetry()) { setDoNotRetry(other.getDoNotRetry()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // optional string exception_class_name = 1; private java.lang.Object exceptionClassName_ = ""; /** * <code>optional string exception_class_name = 1;</code> * * <pre> * Class name of the exception thrown from the server * </pre> */ public boolean hasExceptionClassName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional string exception_class_name = 1;</code> * * <pre> * Class name of the exception thrown from the server * </pre> */ public java.lang.String getExceptionClassName() { java.lang.Object ref = exceptionClassName_; if (!(ref instanceof 
java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); exceptionClassName_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>optional string exception_class_name = 1;</code> * * <pre> * Class name of the exception thrown from the server * </pre> */ public com.google.protobuf.ByteString getExceptionClassNameBytes() { java.lang.Object ref = exceptionClassName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); exceptionClassName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>optional string exception_class_name = 1;</code> * * <pre> * Class name of the exception thrown from the server * </pre> */ public Builder setExceptionClassName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; exceptionClassName_ = value; onChanged(); return this; } /** * <code>optional string exception_class_name = 1;</code> * * <pre> * Class name of the exception thrown from the server * </pre> */ public Builder clearExceptionClassName() { bitField0_ = (bitField0_ & ~0x00000001); exceptionClassName_ = getDefaultInstance().getExceptionClassName(); onChanged(); return this; } /** * <code>optional string exception_class_name = 1;</code> * * <pre> * Class name of the exception thrown from the server * </pre> */ public Builder setExceptionClassNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; exceptionClassName_ = value; onChanged(); return this; } // optional string stack_trace = 2; private java.lang.Object stackTrace_ = ""; /** * <code>optional string stack_trace = 2;</code> * * <pre> * Exception stack trace from the server side * </pre> */ public boolean hasStackTrace() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional string stack_trace = 2;</code> * * <pre> * Exception stack trace from the server side * </pre> */ public java.lang.String getStackTrace() { java.lang.Object ref = stackTrace_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); stackTrace_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>optional string stack_trace = 2;</code> * * <pre> * Exception stack trace from the server side * </pre> */ public com.google.protobuf.ByteString getStackTraceBytes() { java.lang.Object ref = stackTrace_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); stackTrace_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>optional string stack_trace = 2;</code> * * <pre> * Exception stack trace from the server side * </pre> */ public Builder setStackTrace( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; stackTrace_ = value; onChanged(); return this; } /** * <code>optional string stack_trace = 2;</code> * * <pre> * Exception stack trace from the server side * </pre> */ public Builder clearStackTrace() { bitField0_ = (bitField0_ & ~0x00000002); stackTrace_ = getDefaultInstance().getStackTrace(); onChanged(); return this; } /** * <code>optional string stack_trace = 2;</code> * * <pre> * Exception stack trace from the server side * </pre> */ public Builder setStackTraceBytes( com.google.protobuf.ByteString value) { if 
(value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; stackTrace_ = value; onChanged(); return this; } // optional string hostname = 3; private java.lang.Object hostname_ = ""; /** * <code>optional string hostname = 3;</code> * * <pre> * Optional hostname. Filled in for some exceptions such as region moved * where exception gives clue on where the region may have moved. * </pre> */ public boolean hasHostname() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional string hostname = 3;</code> * * <pre> * Optional hostname. Filled in for some exceptions such as region moved * where exception gives clue on where the region may have moved. * </pre> */ public java.lang.String getHostname() { java.lang.Object ref = hostname_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); hostname_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>optional string hostname = 3;</code> * * <pre> * Optional hostname. Filled in for some exceptions such as region moved * where exception gives clue on where the region may have moved. * </pre> */ public com.google.protobuf.ByteString getHostnameBytes() { java.lang.Object ref = hostname_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); hostname_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>optional string hostname = 3;</code> * * <pre> * Optional hostname. Filled in for some exceptions such as region moved * where exception gives clue on where the region may have moved. * </pre> */ public Builder setHostname( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; hostname_ = value; onChanged(); return this; } /** * <code>optional string hostname = 3;</code> * * <pre> * Optional hostname. Filled in for some exceptions such as region moved * where exception gives clue on where the region may have moved. * </pre> */ public Builder clearHostname() { bitField0_ = (bitField0_ & ~0x00000004); hostname_ = getDefaultInstance().getHostname(); onChanged(); return this; } /** * <code>optional string hostname = 3;</code> * * <pre> * Optional hostname. Filled in for some exceptions such as region moved * where exception gives clue on where the region may have moved. 
* </pre> */ public Builder setHostnameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; hostname_ = value; onChanged(); return this; } // optional int32 port = 4; private int port_ ; /** * <code>optional int32 port = 4;</code> */ public boolean hasPort() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional int32 port = 4;</code> */ public int getPort() { return port_; } /** * <code>optional int32 port = 4;</code> */ public Builder setPort(int value) { bitField0_ |= 0x00000008; port_ = value; onChanged(); return this; } /** * <code>optional int32 port = 4;</code> */ public Builder clearPort() { bitField0_ = (bitField0_ & ~0x00000008); port_ = 0; onChanged(); return this; } // optional bool do_not_retry = 5; private boolean doNotRetry_ ; /** * <code>optional bool do_not_retry = 5;</code> * * <pre> * Set if we are NOT to retry on receipt of this exception * </pre> */ public boolean hasDoNotRetry() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>optional bool do_not_retry = 5;</code> * * <pre> * Set if we are NOT to retry on receipt of this exception * </pre> */ public boolean getDoNotRetry() { return doNotRetry_; } /** * <code>optional bool do_not_retry = 5;</code> * * <pre> * Set if we are NOT to retry on receipt of this exception * </pre> */ public Builder setDoNotRetry(boolean value) { bitField0_ |= 0x00000010; doNotRetry_ = value; onChanged(); return this; } /** * <code>optional bool do_not_retry = 5;</code> * * <pre> * Set if we are NOT to retry on receipt of this exception * </pre> */ public Builder clearDoNotRetry() { bitField0_ = (bitField0_ & ~0x00000010); doNotRetry_ = false; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:hbase.pb.ExceptionResponse) } static { defaultInstance = new ExceptionResponse(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.ExceptionResponse) } public interface RequestHeaderOrBuilder extends com.google.protobuf.MessageOrBuilder { // optional uint32 call_id = 1; /** * <code>optional uint32 call_id = 1;</code> * * <pre> * Monotonically increasing call_id to keep track of RPC requests and their response * </pre> */ boolean hasCallId(); /** * <code>optional uint32 call_id = 1;</code> * * <pre> * Monotonically increasing call_id to keep track of RPC requests and their response * </pre> */ int getCallId(); // optional .hbase.pb.RPCTInfo trace_info = 2; /** * <code>optional .hbase.pb.RPCTInfo trace_info = 2;</code> */ boolean hasTraceInfo(); /** * <code>optional .hbase.pb.RPCTInfo trace_info = 2;</code> */ org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo getTraceInfo(); /** * <code>optional .hbase.pb.RPCTInfo trace_info = 2;</code> */ org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfoOrBuilder getTraceInfoOrBuilder(); // optional string method_name = 3; /** * <code>optional string method_name = 3;</code> */ boolean hasMethodName(); /** * <code>optional string method_name = 3;</code> */ java.lang.String getMethodName(); /** * <code>optional string method_name = 3;</code> */ com.google.protobuf.ByteString getMethodNameBytes(); // optional bool request_param = 4; /** * <code>optional bool request_param = 4;</code> * * <pre> * If true, then a pb Message param follows. * </pre> */ boolean hasRequestParam(); /** * <code>optional bool request_param = 4;</code> * * <pre> * If true, then a pb Message param follows. 
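     * (In HBase's RPC framing this param is the request message for the method named by
     * method_name, written immediately after this header on the wire.)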
* </pre> */ boolean getRequestParam(); // optional .hbase.pb.CellBlockMeta cell_block_meta = 5; /** * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 5;</code> * * <pre> * If present, then an encoded data block follows. * </pre> */ boolean hasCellBlockMeta(); /** * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 5;</code> * * <pre> * If present, then an encoded data block follows. * </pre> */ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta getCellBlockMeta(); /** * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 5;</code> * * <pre> * If present, then an encoded data block follows. * </pre> */ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder getCellBlockMetaOrBuilder(); // optional uint32 priority = 6; /** * <code>optional uint32 priority = 6;</code> * * <pre> * 0 is NORMAL priority. 200 is HIGH. If no priority, treat it as NORMAL. * See HConstants. * </pre> */ boolean hasPriority(); /** * <code>optional uint32 priority = 6;</code> * * <pre> * 0 is NORMAL priority. 200 is HIGH. If no priority, treat it as NORMAL. * See HConstants. * </pre> */ int getPriority(); // optional uint32 timeout = 7; /** * <code>optional uint32 timeout = 7;</code> */ boolean hasTimeout(); /** * <code>optional uint32 timeout = 7;</code> */ int getTimeout(); } /** * Protobuf type {@code hbase.pb.RequestHeader} * * <pre> * Header sent making a request. * </pre> */ public static final class RequestHeader extends com.google.protobuf.GeneratedMessage implements RequestHeaderOrBuilder { // Use RequestHeader.newBuilder() to construct. private RequestHeader(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private RequestHeader(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final RequestHeader defaultInstance; public static RequestHeader getDefaultInstance() { return defaultInstance; } public RequestHeader getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private RequestHeader( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { bitField0_ |= 0x00000001; callId_ = input.readUInt32(); break; } case 18: { org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo.Builder subBuilder = null; if (((bitField0_ & 0x00000002) == 0x00000002)) { subBuilder = traceInfo_.toBuilder(); } traceInfo_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(traceInfo_); traceInfo_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } case 26: { bitField0_ |= 0x00000004; methodName_ = input.readBytes(); break; } case 32: { bitField0_ |= 0x00000008; requestParam_ = input.readBool(); break; } case 42: { 
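              // tag 42 = (field number 5 << 3) | wire type 2 (length-delimited): an embedded
              // CellBlockMeta; merge into any value already read so repeated occurrences coalesce.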
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.Builder subBuilder = null; if (((bitField0_ & 0x00000010) == 0x00000010)) { subBuilder = cellBlockMeta_.toBuilder(); } cellBlockMeta_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(cellBlockMeta_); cellBlockMeta_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000010; break; } case 48: { bitField0_ |= 0x00000020; priority_ = input.readUInt32(); break; } case 56: { bitField0_ |= 0x00000040; timeout_ = input.readUInt32(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_RequestHeader_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_RequestHeader_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader.Builder.class); } public static com.google.protobuf.Parser<RequestHeader> PARSER = new com.google.protobuf.AbstractParser<RequestHeader>() { public RequestHeader parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new RequestHeader(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<RequestHeader> getParserForType() { return PARSER; } private int bitField0_; // optional uint32 call_id = 1; public static final int CALL_ID_FIELD_NUMBER = 1; private int callId_; /** * <code>optional uint32 call_id = 1;</code> * * <pre> * Monotonically increasing call_id to keep track of RPC requests and their response * </pre> */ public boolean hasCallId() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional uint32 call_id = 1;</code> * * <pre> * Monotonically increasing call_id to keep track of RPC requests and their response * </pre> */ public int getCallId() { return callId_; } // optional .hbase.pb.RPCTInfo trace_info = 2; public static final int TRACE_INFO_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo traceInfo_; /** * <code>optional .hbase.pb.RPCTInfo trace_info = 2;</code> */ public boolean hasTraceInfo() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional .hbase.pb.RPCTInfo trace_info = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo getTraceInfo() { return traceInfo_; } /** * <code>optional .hbase.pb.RPCTInfo trace_info = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfoOrBuilder getTraceInfoOrBuilder() { return traceInfo_; } // optional string method_name = 3; public static final int METHOD_NAME_FIELD_NUMBER = 3; private java.lang.Object methodName_; /** * <code>optional string method_name = 3;</code> */ public boolean hasMethodName() { return ((bitField0_ & 
0x00000004) == 0x00000004); } /** * <code>optional string method_name = 3;</code> */ public java.lang.String getMethodName() { java.lang.Object ref = methodName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { methodName_ = s; } return s; } } /** * <code>optional string method_name = 3;</code> */ public com.google.protobuf.ByteString getMethodNameBytes() { java.lang.Object ref = methodName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); methodName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } // optional bool request_param = 4; public static final int REQUEST_PARAM_FIELD_NUMBER = 4; private boolean requestParam_; /** * <code>optional bool request_param = 4;</code> * * <pre> * If true, then a pb Message param follows. * </pre> */ public boolean hasRequestParam() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional bool request_param = 4;</code> * * <pre> * If true, then a pb Message param follows. * </pre> */ public boolean getRequestParam() { return requestParam_; } // optional .hbase.pb.CellBlockMeta cell_block_meta = 5; public static final int CELL_BLOCK_META_FIELD_NUMBER = 5; private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta cellBlockMeta_; /** * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 5;</code> * * <pre> * If present, then an encoded data block follows. * </pre> */ public boolean hasCellBlockMeta() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 5;</code> * * <pre> * If present, then an encoded data block follows. * </pre> */ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta getCellBlockMeta() { return cellBlockMeta_; } /** * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 5;</code> * * <pre> * If present, then an encoded data block follows. * </pre> */ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder getCellBlockMetaOrBuilder() { return cellBlockMeta_; } // optional uint32 priority = 6; public static final int PRIORITY_FIELD_NUMBER = 6; private int priority_; /** * <code>optional uint32 priority = 6;</code> * * <pre> * 0 is NORMAL priority. 200 is HIGH. If no priority, treat it as NORMAL. * See HConstants. * </pre> */ public boolean hasPriority() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <code>optional uint32 priority = 6;</code> * * <pre> * 0 is NORMAL priority. 200 is HIGH. If no priority, treat it as NORMAL. * See HConstants. 
* </pre> */ public int getPriority() { return priority_; } // optional uint32 timeout = 7; public static final int TIMEOUT_FIELD_NUMBER = 7; private int timeout_; /** * <code>optional uint32 timeout = 7;</code> */ public boolean hasTimeout() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** * <code>optional uint32 timeout = 7;</code> */ public int getTimeout() { return timeout_; } private void initFields() { callId_ = 0; traceInfo_ = org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo.getDefaultInstance(); methodName_ = ""; requestParam_ = false; cellBlockMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance(); priority_ = 0; timeout_ = 0; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt32(1, callId_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeMessage(2, traceInfo_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBytes(3, getMethodNameBytes()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeBool(4, requestParam_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeMessage(5, cellBlockMeta_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { output.writeUInt32(6, priority_); } if (((bitField0_ & 0x00000040) == 0x00000040)) { output.writeUInt32(7, timeout_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeUInt32Size(1, callId_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, traceInfo_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(3, getMethodNameBytes()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(4, requestParam_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(5, cellBlockMeta_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { size += com.google.protobuf.CodedOutputStream .computeUInt32Size(6, priority_); } if (((bitField0_ & 0x00000040) == 0x00000040)) { size += com.google.protobuf.CodedOutputStream .computeUInt32Size(7, timeout_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader) obj; boolean result = true; result = result && (hasCallId() == other.hasCallId()); if (hasCallId()) { result = result && 
(getCallId() == other.getCallId()); } result = result && (hasTraceInfo() == other.hasTraceInfo()); if (hasTraceInfo()) { result = result && getTraceInfo() .equals(other.getTraceInfo()); } result = result && (hasMethodName() == other.hasMethodName()); if (hasMethodName()) { result = result && getMethodName() .equals(other.getMethodName()); } result = result && (hasRequestParam() == other.hasRequestParam()); if (hasRequestParam()) { result = result && (getRequestParam() == other.getRequestParam()); } result = result && (hasCellBlockMeta() == other.hasCellBlockMeta()); if (hasCellBlockMeta()) { result = result && getCellBlockMeta() .equals(other.getCellBlockMeta()); } result = result && (hasPriority() == other.hasPriority()); if (hasPriority()) { result = result && (getPriority() == other.getPriority()); } result = result && (hasTimeout() == other.hasTimeout()); if (hasTimeout()) { result = result && (getTimeout() == other.getTimeout()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasCallId()) { hash = (37 * hash) + CALL_ID_FIELD_NUMBER; hash = (53 * hash) + getCallId(); } if (hasTraceInfo()) { hash = (37 * hash) + TRACE_INFO_FIELD_NUMBER; hash = (53 * hash) + getTraceInfo().hashCode(); } if (hasMethodName()) { hash = (37 * hash) + METHOD_NAME_FIELD_NUMBER; hash = (53 * hash) + getMethodName().hashCode(); } if (hasRequestParam()) { hash = (37 * hash) + REQUEST_PARAM_FIELD_NUMBER; hash = (53 * hash) + hashBoolean(getRequestParam()); } if (hasCellBlockMeta()) { hash = (37 * hash) + CELL_BLOCK_META_FIELD_NUMBER; hash = (53 * hash) + getCellBlockMeta().hashCode(); } if (hasPriority()) { hash = (37 * hash) + PRIORITY_FIELD_NUMBER; hash = (53 * hash) + getPriority(); } if (hasTimeout()) { hash = (37 * hash) + TIMEOUT_FIELD_NUMBER; hash = (53 * hash) + getTimeout(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.RequestHeader} * * <pre> * Header sent making a request. * </pre> */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeaderOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_RequestHeader_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_RequestHeader_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getTraceInfoFieldBuilder(); getCellBlockMetaFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); callId_ = 0; bitField0_ = (bitField0_ & ~0x00000001); if (traceInfoBuilder_ == null) { traceInfo_ = org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo.getDefaultInstance(); } else { traceInfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); methodName_ = ""; bitField0_ = (bitField0_ & ~0x00000004); requestParam_ = false; bitField0_ = (bitField0_ & ~0x00000008); if (cellBlockMetaBuilder_ == null) { cellBlockMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance(); } else { cellBlockMetaBuilder_.clear(); } 
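        // Reset the remaining presence bits and defaults: cell_block_meta (0x10), priority (0x20), timeout (0x40).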
bitField0_ = (bitField0_ & ~0x00000010); priority_ = 0; bitField0_ = (bitField0_ & ~0x00000020); timeout_ = 0; bitField0_ = (bitField0_ & ~0x00000040); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_RequestHeader_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader build() { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader buildPartial() { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.callId_ = callId_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } if (traceInfoBuilder_ == null) { result.traceInfo_ = traceInfo_; } else { result.traceInfo_ = traceInfoBuilder_.build(); } if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.methodName_ = methodName_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; } result.requestParam_ = requestParam_; if (((from_bitField0_ & 0x00000010) == 0x00000010)) { to_bitField0_ |= 0x00000010; } if (cellBlockMetaBuilder_ == null) { result.cellBlockMeta_ = cellBlockMeta_; } else { result.cellBlockMeta_ = cellBlockMetaBuilder_.build(); } if (((from_bitField0_ & 0x00000020) == 0x00000020)) { to_bitField0_ |= 0x00000020; } result.priority_ = priority_; if (((from_bitField0_ & 0x00000040) == 0x00000040)) { to_bitField0_ |= 0x00000040; } result.timeout_ = timeout_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader other) { if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader.getDefaultInstance()) return this; if (other.hasCallId()) { setCallId(other.getCallId()); } if (other.hasTraceInfo()) { mergeTraceInfo(other.getTraceInfo()); } if (other.hasMethodName()) { bitField0_ |= 0x00000004; methodName_ = other.methodName_; onChanged(); } if (other.hasRequestParam()) { setRequestParam(other.getRequestParam()); } if (other.hasCellBlockMeta()) { mergeCellBlockMeta(other.getCellBlockMeta()); } if (other.hasPriority()) { setPriority(other.getPriority()); } if (other.hasTimeout()) { setTimeout(other.getTimeout()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 
throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // optional uint32 call_id = 1; private int callId_ ; /** * <code>optional uint32 call_id = 1;</code> * * <pre> * Monotonically increasing call_id to keep track of RPC requests and their response * </pre> */ public boolean hasCallId() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional uint32 call_id = 1;</code> * * <pre> * Monotonically increasing call_id to keep track of RPC requests and their response * </pre> */ public int getCallId() { return callId_; } /** * <code>optional uint32 call_id = 1;</code> * * <pre> * Monotonically increasing call_id to keep track of RPC requests and their response * </pre> */ public Builder setCallId(int value) { bitField0_ |= 0x00000001; callId_ = value; onChanged(); return this; } /** * <code>optional uint32 call_id = 1;</code> * * <pre> * Monotonically increasing call_id to keep track of RPC requests and their response * </pre> */ public Builder clearCallId() { bitField0_ = (bitField0_ & ~0x00000001); callId_ = 0; onChanged(); return this; } // optional .hbase.pb.RPCTInfo trace_info = 2; private org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo traceInfo_ = org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo, org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfoOrBuilder> traceInfoBuilder_; /** * <code>optional .hbase.pb.RPCTInfo trace_info = 2;</code> */ public boolean hasTraceInfo() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional .hbase.pb.RPCTInfo trace_info = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo getTraceInfo() { if (traceInfoBuilder_ == null) { return traceInfo_; } else { return traceInfoBuilder_.getMessage(); } } /** * <code>optional .hbase.pb.RPCTInfo trace_info = 2;</code> */ public Builder setTraceInfo(org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo value) { if (traceInfoBuilder_ == null) { if (value == null) { throw new NullPointerException(); } traceInfo_ = value; onChanged(); } else { traceInfoBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .hbase.pb.RPCTInfo trace_info = 2;</code> */ public Builder setTraceInfo( org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo.Builder builderForValue) { if (traceInfoBuilder_ == null) { traceInfo_ = builderForValue.build(); onChanged(); } else { traceInfoBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .hbase.pb.RPCTInfo trace_info = 2;</code> */ public Builder mergeTraceInfo(org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo value) { if (traceInfoBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && traceInfo_ != org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo.getDefaultInstance()) { traceInfo_ = 
org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo.newBuilder(traceInfo_).mergeFrom(value).buildPartial(); } else { traceInfo_ = value; } onChanged(); } else { traceInfoBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .hbase.pb.RPCTInfo trace_info = 2;</code> */ public Builder clearTraceInfo() { if (traceInfoBuilder_ == null) { traceInfo_ = org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo.getDefaultInstance(); onChanged(); } else { traceInfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } /** * <code>optional .hbase.pb.RPCTInfo trace_info = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo.Builder getTraceInfoBuilder() { bitField0_ |= 0x00000002; onChanged(); return getTraceInfoFieldBuilder().getBuilder(); } /** * <code>optional .hbase.pb.RPCTInfo trace_info = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfoOrBuilder getTraceInfoOrBuilder() { if (traceInfoBuilder_ != null) { return traceInfoBuilder_.getMessageOrBuilder(); } else { return traceInfo_; } } /** * <code>optional .hbase.pb.RPCTInfo trace_info = 2;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo, org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfoOrBuilder> getTraceInfoFieldBuilder() { if (traceInfoBuilder_ == null) { traceInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo, org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfoOrBuilder>( traceInfo_, getParentForChildren(), isClean()); traceInfo_ = null; } return traceInfoBuilder_; } // optional string method_name = 3; private java.lang.Object methodName_ = ""; /** * <code>optional string method_name = 3;</code> */ public boolean hasMethodName() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional string method_name = 3;</code> */ public java.lang.String getMethodName() { java.lang.Object ref = methodName_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); methodName_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>optional string method_name = 3;</code> */ public com.google.protobuf.ByteString getMethodNameBytes() { java.lang.Object ref = methodName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); methodName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>optional string method_name = 3;</code> */ public Builder setMethodName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; methodName_ = value; onChanged(); return this; } /** * <code>optional string method_name = 3;</code> */ public Builder clearMethodName() { bitField0_ = (bitField0_ & ~0x00000004); methodName_ = getDefaultInstance().getMethodName(); onChanged(); return this; } /** * <code>optional string method_name = 3;</code> */ public Builder setMethodNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; methodName_ = value; onChanged(); return this; } // 
optional bool request_param = 4; private boolean requestParam_ ; /** * <code>optional bool request_param = 4;</code> * * <pre> * If true, then a pb Message param follows. * </pre> */ public boolean hasRequestParam() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional bool request_param = 4;</code> * * <pre> * If true, then a pb Message param follows. * </pre> */ public boolean getRequestParam() { return requestParam_; } /** * <code>optional bool request_param = 4;</code> * * <pre> * If true, then a pb Message param follows. * </pre> */ public Builder setRequestParam(boolean value) { bitField0_ |= 0x00000008; requestParam_ = value; onChanged(); return this; } /** * <code>optional bool request_param = 4;</code> * * <pre> * If true, then a pb Message param follows. * </pre> */ public Builder clearRequestParam() { bitField0_ = (bitField0_ & ~0x00000008); requestParam_ = false; onChanged(); return this; } // optional .hbase.pb.CellBlockMeta cell_block_meta = 5; private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta cellBlockMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder> cellBlockMetaBuilder_; /** * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 5;</code> * * <pre> * If present, then an encoded data block follows. * </pre> */ public boolean hasCellBlockMeta() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 5;</code> * * <pre> * If present, then an encoded data block follows. * </pre> */ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta getCellBlockMeta() { if (cellBlockMetaBuilder_ == null) { return cellBlockMeta_; } else { return cellBlockMetaBuilder_.getMessage(); } } /** * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 5;</code> * * <pre> * If present, then an encoded data block follows. * </pre> */ public Builder setCellBlockMeta(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta value) { if (cellBlockMetaBuilder_ == null) { if (value == null) { throw new NullPointerException(); } cellBlockMeta_ = value; onChanged(); } else { cellBlockMetaBuilder_.setMessage(value); } bitField0_ |= 0x00000010; return this; } /** * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 5;</code> * * <pre> * If present, then an encoded data block follows. * </pre> */ public Builder setCellBlockMeta( org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.Builder builderForValue) { if (cellBlockMetaBuilder_ == null) { cellBlockMeta_ = builderForValue.build(); onChanged(); } else { cellBlockMetaBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000010; return this; } /** * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 5;</code> * * <pre> * If present, then an encoded data block follows. 
* </pre> */ public Builder mergeCellBlockMeta(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta value) { if (cellBlockMetaBuilder_ == null) { if (((bitField0_ & 0x00000010) == 0x00000010) && cellBlockMeta_ != org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance()) { cellBlockMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.newBuilder(cellBlockMeta_).mergeFrom(value).buildPartial(); } else { cellBlockMeta_ = value; } onChanged(); } else { cellBlockMetaBuilder_.mergeFrom(value); } bitField0_ |= 0x00000010; return this; } /** * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 5;</code> * * <pre> * If present, then an encoded data block follows. * </pre> */ public Builder clearCellBlockMeta() { if (cellBlockMetaBuilder_ == null) { cellBlockMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance(); onChanged(); } else { cellBlockMetaBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000010); return this; } /** * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 5;</code> * * <pre> * If present, then an encoded data block follows. * </pre> */ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.Builder getCellBlockMetaBuilder() { bitField0_ |= 0x00000010; onChanged(); return getCellBlockMetaFieldBuilder().getBuilder(); } /** * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 5;</code> * * <pre> * If present, then an encoded data block follows. * </pre> */ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder getCellBlockMetaOrBuilder() { if (cellBlockMetaBuilder_ != null) { return cellBlockMetaBuilder_.getMessageOrBuilder(); } else { return cellBlockMeta_; } } /** * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 5;</code> * * <pre> * If present, then an encoded data block follows. * </pre> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder> getCellBlockMetaFieldBuilder() { if (cellBlockMetaBuilder_ == null) { cellBlockMetaBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder>( cellBlockMeta_, getParentForChildren(), isClean()); cellBlockMeta_ = null; } return cellBlockMetaBuilder_; } // optional uint32 priority = 6; private int priority_ ; /** * <code>optional uint32 priority = 6;</code> * * <pre> * 0 is NORMAL priority. 200 is HIGH. If no priority, treat it as NORMAL. * See HConstants. * </pre> */ public boolean hasPriority() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <code>optional uint32 priority = 6;</code> * * <pre> * 0 is NORMAL priority. 200 is HIGH. If no priority, treat it as NORMAL. * See HConstants. * </pre> */ public int getPriority() { return priority_; } /** * <code>optional uint32 priority = 6;</code> * * <pre> * 0 is NORMAL priority. 200 is HIGH. If no priority, treat it as NORMAL. * See HConstants. * </pre> */ public Builder setPriority(int value) { bitField0_ |= 0x00000020; priority_ = value; onChanged(); return this; } /** * <code>optional uint32 priority = 6;</code> * * <pre> * 0 is NORMAL priority. 200 is HIGH. 
If no priority, treat it as NORMAL. * See HConstants. * </pre> */ public Builder clearPriority() { bitField0_ = (bitField0_ & ~0x00000020); priority_ = 0; onChanged(); return this; } // optional uint32 timeout = 7; private int timeout_ ; /** * <code>optional uint32 timeout = 7;</code> */ public boolean hasTimeout() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** * <code>optional uint32 timeout = 7;</code> */ public int getTimeout() { return timeout_; } /** * <code>optional uint32 timeout = 7;</code> */ public Builder setTimeout(int value) { bitField0_ |= 0x00000040; timeout_ = value; onChanged(); return this; } /** * <code>optional uint32 timeout = 7;</code> */ public Builder clearTimeout() { bitField0_ = (bitField0_ & ~0x00000040); timeout_ = 0; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:hbase.pb.RequestHeader) } static { defaultInstance = new RequestHeader(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.RequestHeader) } public interface ResponseHeaderOrBuilder extends com.google.protobuf.MessageOrBuilder { // optional uint32 call_id = 1; /** * <code>optional uint32 call_id = 1;</code> */ boolean hasCallId(); /** * <code>optional uint32 call_id = 1;</code> */ int getCallId(); // optional .hbase.pb.ExceptionResponse exception = 2; /** * <code>optional .hbase.pb.ExceptionResponse exception = 2;</code> * * <pre> * If present, then request threw an exception and no response message (else we presume one) * </pre> */ boolean hasException(); /** * <code>optional .hbase.pb.ExceptionResponse exception = 2;</code> * * <pre> * If present, then request threw an exception and no response message (else we presume one) * </pre> */ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse getException(); /** * <code>optional .hbase.pb.ExceptionResponse exception = 2;</code> * * <pre> * If present, then request threw an exception and no response message (else we presume one) * </pre> */ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponseOrBuilder getExceptionOrBuilder(); // optional .hbase.pb.CellBlockMeta cell_block_meta = 3; /** * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 3;</code> * * <pre> * If present, then an encoded data block follows. * </pre> */ boolean hasCellBlockMeta(); /** * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 3;</code> * * <pre> * If present, then an encoded data block follows. * </pre> */ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta getCellBlockMeta(); /** * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 3;</code> * * <pre> * If present, then an encoded data block follows. * </pre> */ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder getCellBlockMetaOrBuilder(); } /** * Protobuf type {@code hbase.pb.ResponseHeader} */ public static final class ResponseHeader extends com.google.protobuf.GeneratedMessage implements ResponseHeaderOrBuilder { // Use ResponseHeader.newBuilder() to construct. 
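    /*
     * Illustrative sketch only (this file is generated; the snippet below is not part of it):
     * RequestHeader and ResponseHeader are paired by call_id. A caller might fill in the request
     * header with the Builder methods defined above and read back the matching response header,
     * checking hasException() before expecting a result message. The streams "out" and "in" are
     * assumptions for the example; the actual HBase RPC framing is handled by the RPC client and
     * server classes, not by this generated code.
     *
     *   RPCProtos.RequestHeader request = RPCProtos.RequestHeader.newBuilder()
     *       .setCallId(1)
     *       .setMethodName("Get")
     *       .setRequestParam(true)   // a pb Message param follows the header
     *       .setTimeout(60000)
     *       .build();
     *   request.writeDelimitedTo(out);
     *
     *   RPCProtos.ResponseHeader response = RPCProtos.ResponseHeader.parseDelimitedFrom(in);
     *   if (response.hasException()) {
     *     // the request threw on the server; no response message follows
     *   }
     */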
private ResponseHeader(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private ResponseHeader(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final ResponseHeader defaultInstance; public static ResponseHeader getDefaultInstance() { return defaultInstance; } public ResponseHeader getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ResponseHeader( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { bitField0_ |= 0x00000001; callId_ = input.readUInt32(); break; } case 18: { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.Builder subBuilder = null; if (((bitField0_ & 0x00000002) == 0x00000002)) { subBuilder = exception_.toBuilder(); } exception_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(exception_); exception_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } case 26: { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.Builder subBuilder = null; if (((bitField0_ & 0x00000004) == 0x00000004)) { subBuilder = cellBlockMeta_.toBuilder(); } cellBlockMeta_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(cellBlockMeta_); cellBlockMeta_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000004; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ResponseHeader_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ResponseHeader_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader.Builder.class); } public static com.google.protobuf.Parser<ResponseHeader> PARSER = new com.google.protobuf.AbstractParser<ResponseHeader>() { public ResponseHeader parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ResponseHeader(input, 
extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<ResponseHeader> getParserForType() { return PARSER; } private int bitField0_; // optional uint32 call_id = 1; public static final int CALL_ID_FIELD_NUMBER = 1; private int callId_; /** * <code>optional uint32 call_id = 1;</code> */ public boolean hasCallId() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional uint32 call_id = 1;</code> */ public int getCallId() { return callId_; } // optional .hbase.pb.ExceptionResponse exception = 2; public static final int EXCEPTION_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse exception_; /** * <code>optional .hbase.pb.ExceptionResponse exception = 2;</code> * * <pre> * If present, then request threw an exception and no response message (else we presume one) * </pre> */ public boolean hasException() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional .hbase.pb.ExceptionResponse exception = 2;</code> * * <pre> * If present, then request threw an exception and no response message (else we presume one) * </pre> */ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse getException() { return exception_; } /** * <code>optional .hbase.pb.ExceptionResponse exception = 2;</code> * * <pre> * If present, then request threw an exception and no response message (else we presume one) * </pre> */ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponseOrBuilder getExceptionOrBuilder() { return exception_; } // optional .hbase.pb.CellBlockMeta cell_block_meta = 3; public static final int CELL_BLOCK_META_FIELD_NUMBER = 3; private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta cellBlockMeta_; /** * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 3;</code> * * <pre> * If present, then an encoded data block follows. * </pre> */ public boolean hasCellBlockMeta() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 3;</code> * * <pre> * If present, then an encoded data block follows. * </pre> */ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta getCellBlockMeta() { return cellBlockMeta_; } /** * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 3;</code> * * <pre> * If present, then an encoded data block follows. 
* </pre> */ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder getCellBlockMetaOrBuilder() { return cellBlockMeta_; } private void initFields() { callId_ = 0; exception_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance(); cellBlockMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt32(1, callId_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeMessage(2, exception_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeMessage(3, cellBlockMeta_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeUInt32Size(1, callId_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, exception_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(3, cellBlockMeta_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader) obj; boolean result = true; result = result && (hasCallId() == other.hasCallId()); if (hasCallId()) { result = result && (getCallId() == other.getCallId()); } result = result && (hasException() == other.hasException()); if (hasException()) { result = result && getException() .equals(other.getException()); } result = result && (hasCellBlockMeta() == other.hasCellBlockMeta()); if (hasCellBlockMeta()) { result = result && getCellBlockMeta() .equals(other.getCellBlockMeta()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasCallId()) { hash = (37 * hash) + CALL_ID_FIELD_NUMBER; hash = (53 * hash) + getCallId(); } if (hasException()) { hash = (37 * hash) + EXCEPTION_FIELD_NUMBER; hash = (53 * hash) + getException().hashCode(); } if (hasCellBlockMeta()) { hash = (37 * hash) + CELL_BLOCK_META_FIELD_NUMBER; hash = (53 * hash) + getCellBlockMeta().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader parseFrom( 
com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.ResponseHeader} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeaderOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ResponseHeader_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ResponseHeader_fieldAccessorTable .ensureFieldAccessorsInitialized( 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getExceptionFieldBuilder(); getCellBlockMetaFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); callId_ = 0; bitField0_ = (bitField0_ & ~0x00000001); if (exceptionBuilder_ == null) { exception_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance(); } else { exceptionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); if (cellBlockMetaBuilder_ == null) { cellBlockMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance(); } else { cellBlockMetaBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000004); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ResponseHeader_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader build() { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader buildPartial() { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.callId_ = callId_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } if (exceptionBuilder_ == null) { result.exception_ = exception_; } else { result.exception_ = exceptionBuilder_.build(); } if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } if (cellBlockMetaBuilder_ == null) { result.cellBlockMeta_ = cellBlockMeta_; } else { result.cellBlockMeta_ = cellBlockMetaBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader other) { if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader.getDefaultInstance()) return this; if (other.hasCallId()) { setCallId(other.getCallId()); } if (other.hasException()) { mergeException(other.getException()); } if 
(other.hasCellBlockMeta()) { mergeCellBlockMeta(other.getCellBlockMeta()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // optional uint32 call_id = 1; private int callId_ ; /** * <code>optional uint32 call_id = 1;</code> */ public boolean hasCallId() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional uint32 call_id = 1;</code> */ public int getCallId() { return callId_; } /** * <code>optional uint32 call_id = 1;</code> */ public Builder setCallId(int value) { bitField0_ |= 0x00000001; callId_ = value; onChanged(); return this; } /** * <code>optional uint32 call_id = 1;</code> */ public Builder clearCallId() { bitField0_ = (bitField0_ & ~0x00000001); callId_ = 0; onChanged(); return this; } // optional .hbase.pb.ExceptionResponse exception = 2; private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse exception_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponseOrBuilder> exceptionBuilder_; /** * <code>optional .hbase.pb.ExceptionResponse exception = 2;</code> * * <pre> * If present, then request threw an exception and no response message (else we presume one) * </pre> */ public boolean hasException() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional .hbase.pb.ExceptionResponse exception = 2;</code> * * <pre> * If present, then request threw an exception and no response message (else we presume one) * </pre> */ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse getException() { if (exceptionBuilder_ == null) { return exception_; } else { return exceptionBuilder_.getMessage(); } } /** * <code>optional .hbase.pb.ExceptionResponse exception = 2;</code> * * <pre> * If present, then request threw an exception and no response message (else we presume one) * </pre> */ public Builder setException(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse value) { if (exceptionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } exception_ = value; onChanged(); } else { exceptionBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .hbase.pb.ExceptionResponse exception = 2;</code> * * <pre> * If present, then request threw an exception and no response message (else we presume one) * </pre> */ public Builder setException( org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.Builder builderForValue) { if (exceptionBuilder_ == null) { exception_ = builderForValue.build(); onChanged(); } else { 
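          // A nested field builder already exists for exception, so route the update through it rather than the raw field.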
exceptionBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .hbase.pb.ExceptionResponse exception = 2;</code> * * <pre> * If present, then request threw an exception and no response message (else we presume one) * </pre> */ public Builder mergeException(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse value) { if (exceptionBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && exception_ != org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance()) { exception_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.newBuilder(exception_).mergeFrom(value).buildPartial(); } else { exception_ = value; } onChanged(); } else { exceptionBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .hbase.pb.ExceptionResponse exception = 2;</code> * * <pre> * If present, then request threw an exception and no response message (else we presume one) * </pre> */ public Builder clearException() { if (exceptionBuilder_ == null) { exception_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance(); onChanged(); } else { exceptionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } /** * <code>optional .hbase.pb.ExceptionResponse exception = 2;</code> * * <pre> * If present, then request threw an exception and no response message (else we presume one) * </pre> */ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.Builder getExceptionBuilder() { bitField0_ |= 0x00000002; onChanged(); return getExceptionFieldBuilder().getBuilder(); } /** * <code>optional .hbase.pb.ExceptionResponse exception = 2;</code> * * <pre> * If present, then request threw an exception and no response message (else we presume one) * </pre> */ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponseOrBuilder getExceptionOrBuilder() { if (exceptionBuilder_ != null) { return exceptionBuilder_.getMessageOrBuilder(); } else { return exception_; } } /** * <code>optional .hbase.pb.ExceptionResponse exception = 2;</code> * * <pre> * If present, then request threw an exception and no response message (else we presume one) * </pre> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponseOrBuilder> getExceptionFieldBuilder() { if (exceptionBuilder_ == null) { exceptionBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponseOrBuilder>( exception_, getParentForChildren(), isClean()); exception_ = null; } return exceptionBuilder_; } // optional .hbase.pb.CellBlockMeta cell_block_meta = 3; private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta cellBlockMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder> 
cellBlockMetaBuilder_; /** * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 3;</code> * * <pre> * If present, then an encoded data block follows. * </pre> */ public boolean hasCellBlockMeta() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 3;</code> * * <pre> * If present, then an encoded data block follows. * </pre> */ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta getCellBlockMeta() { if (cellBlockMetaBuilder_ == null) { return cellBlockMeta_; } else { return cellBlockMetaBuilder_.getMessage(); } } /** * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 3;</code> * * <pre> * If present, then an encoded data block follows. * </pre> */ public Builder setCellBlockMeta(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta value) { if (cellBlockMetaBuilder_ == null) { if (value == null) { throw new NullPointerException(); } cellBlockMeta_ = value; onChanged(); } else { cellBlockMetaBuilder_.setMessage(value); } bitField0_ |= 0x00000004; return this; } /** * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 3;</code> * * <pre> * If present, then an encoded data block follows. * </pre> */ public Builder setCellBlockMeta( org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.Builder builderForValue) { if (cellBlockMetaBuilder_ == null) { cellBlockMeta_ = builderForValue.build(); onChanged(); } else { cellBlockMetaBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; return this; } /** * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 3;</code> * * <pre> * If present, then an encoded data block follows. * </pre> */ public Builder mergeCellBlockMeta(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta value) { if (cellBlockMetaBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004) && cellBlockMeta_ != org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance()) { cellBlockMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.newBuilder(cellBlockMeta_).mergeFrom(value).buildPartial(); } else { cellBlockMeta_ = value; } onChanged(); } else { cellBlockMetaBuilder_.mergeFrom(value); } bitField0_ |= 0x00000004; return this; } /** * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 3;</code> * * <pre> * If present, then an encoded data block follows. * </pre> */ public Builder clearCellBlockMeta() { if (cellBlockMetaBuilder_ == null) { cellBlockMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance(); onChanged(); } else { cellBlockMetaBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000004); return this; } /** * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 3;</code> * * <pre> * If present, then an encoded data block follows. * </pre> */ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.Builder getCellBlockMetaBuilder() { bitField0_ |= 0x00000004; onChanged(); return getCellBlockMetaFieldBuilder().getBuilder(); } /** * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 3;</code> * * <pre> * If present, then an encoded data block follows. 
* </pre> */ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder getCellBlockMetaOrBuilder() { if (cellBlockMetaBuilder_ != null) { return cellBlockMetaBuilder_.getMessageOrBuilder(); } else { return cellBlockMeta_; } } /** * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 3;</code> * * <pre> * If present, then an encoded data block follows. * </pre> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder> getCellBlockMetaFieldBuilder() { if (cellBlockMetaBuilder_ == null) { cellBlockMetaBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder>( cellBlockMeta_, getParentForChildren(), isClean()); cellBlockMeta_ = null; } return cellBlockMetaBuilder_; } // @@protoc_insertion_point(builder_scope:hbase.pb.ResponseHeader) } static { defaultInstance = new ResponseHeader(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.ResponseHeader) } private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_UserInformation_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_UserInformation_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ConnectionHeader_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_ConnectionHeader_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_CellBlockMeta_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_CellBlockMeta_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ExceptionResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_ExceptionResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RequestHeader_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_RequestHeader_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ResponseHeader_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_ResponseHeader_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n\tRPC.proto\022\010hbase.pb\032\rTracing.proto\032\013HB" + "ase.proto\"<\n\017UserInformation\022\026\n\016effectiv" + "e_user\030\001 \002(\t\022\021\n\treal_user\030\002 \001(\t\"\310\001\n\020Conn" + "ectionHeader\022,\n\tuser_info\030\001 \001(\0132\031.hbase." 
+ "pb.UserInformation\022\024\n\014service_name\030\002 \001(\t" + "\022\036\n\026cell_block_codec_class\030\003 \001(\t\022#\n\033cell" + "_block_compressor_class\030\004 \001(\t\022+\n\014version" + "_info\030\005 \001(\0132\025.hbase.pb.VersionInfo\"\037\n\rCe" + "llBlockMeta\022\016\n\006length\030\001 \001(\r\"|\n\021Exception" + "Response\022\034\n\024exception_class_name\030\001 \001(\t\022\023", "\n\013stack_trace\030\002 \001(\t\022\020\n\010hostname\030\003 \001(\t\022\014\n" + "\004port\030\004 \001(\005\022\024\n\014do_not_retry\030\005 \001(\010\"\311\001\n\rRe" + "questHeader\022\017\n\007call_id\030\001 \001(\r\022&\n\ntrace_in" + "fo\030\002 \001(\0132\022.hbase.pb.RPCTInfo\022\023\n\013method_n" + "ame\030\003 \001(\t\022\025\n\rrequest_param\030\004 \001(\010\0220\n\017cell" + "_block_meta\030\005 \001(\0132\027.hbase.pb.CellBlockMe" + "ta\022\020\n\010priority\030\006 \001(\r\022\017\n\007timeout\030\007 \001(\r\"\203\001" + "\n\016ResponseHeader\022\017\n\007call_id\030\001 \001(\r\022.\n\texc" + "eption\030\002 \001(\0132\033.hbase.pb.ExceptionRespons" + "e\0220\n\017cell_block_meta\030\003 \001(\0132\027.hbase.pb.Ce", "llBlockMetaB<\n*org.apache.hadoop.hbase.p" + "rotobuf.generatedB\tRPCProtosH\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { public com.google.protobuf.ExtensionRegistry assignDescriptors( com.google.protobuf.Descriptors.FileDescriptor root) { descriptor = root; internal_static_hbase_pb_UserInformation_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_hbase_pb_UserInformation_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_UserInformation_descriptor, new java.lang.String[] { "EffectiveUser", "RealUser", }); internal_static_hbase_pb_ConnectionHeader_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_hbase_pb_ConnectionHeader_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_ConnectionHeader_descriptor, new java.lang.String[] { "UserInfo", "ServiceName", "CellBlockCodecClass", "CellBlockCompressorClass", "VersionInfo", }); internal_static_hbase_pb_CellBlockMeta_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_hbase_pb_CellBlockMeta_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_CellBlockMeta_descriptor, new java.lang.String[] { "Length", }); internal_static_hbase_pb_ExceptionResponse_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_hbase_pb_ExceptionResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_ExceptionResponse_descriptor, new java.lang.String[] { "ExceptionClassName", "StackTrace", "Hostname", "Port", "DoNotRetry", }); internal_static_hbase_pb_RequestHeader_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_hbase_pb_RequestHeader_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_RequestHeader_descriptor, new java.lang.String[] { "CallId", "TraceInfo", "MethodName", "RequestParam", "CellBlockMeta", "Priority", "Timeout", }); internal_static_hbase_pb_ResponseHeader_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_hbase_pb_ResponseHeader_fieldAccessorTable = new 
com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_ResponseHeader_descriptor, new java.lang.String[] { "CallId", "Exception", "CellBlockMeta", }); return null; } }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { org.apache.hadoop.hbase.protobuf.generated.TracingProtos.getDescriptor(), org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(), }, assigner); } // @@protoc_insertion_point(outer_class_scope) }
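/*
 * Usage sketch (illustrative only; the call id, exception class, and stack
 * trace values below are made up, not taken from this file). It shows how the
 * generated ResponseHeader builder defined above can report a failed call and
 * round-trip through the protobuf wire format. Only methods that appear in
 * this generated class are used — newBuilder(), setCallId(), the
 * setException(Builder) overload, build(), hasException(), getCallId() — plus
 * the standard protobuf helpers toByteArray() and parseFrom().
 *
 *   import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse;
 *   import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader;
 *
 *   // Build a header for a call that failed on the server side.
 *   ResponseHeader header = ResponseHeader.newBuilder()
 *       .setCallId(42)                                      // hypothetical call id
 *       .setException(ExceptionResponse.newBuilder()        // builder overload of setException
 *           .setExceptionClassName("java.io.IOException")   // hypothetical exception class
 *           .setStackTrace("...")                           // hypothetical stack trace
 *           .setDoNotRetry(false))
 *       .build();
 *
 *   // Round-trip through the wire format.
 *   byte[] bytes = header.toByteArray();
 *   ResponseHeader parsed = ResponseHeader.parseFrom(bytes);
 *   assert parsed.hasException();
 *   assert parsed.getCallId() == 42;
 *
 * Because exception (field 2) and cell_block_meta (field 3) are both optional,
 * a successful response may omit them and carry only call_id.
 */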