// Generated by the protocol buffer compiler. DO NOT EDIT! // source: RPC.proto package org.apache.hadoop.hbase.shaded.protobuf.generated; public final class RPCProtos { private RPCProtos() {} public static void registerAllExtensions( org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite registry) { } public static void registerAllExtensions( org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry registry) { registerAllExtensions( (org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite) registry); } public interface UserInformationOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.UserInformation) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>required string effective_user = 1;</code> */ boolean hasEffectiveUser(); /** * <code>required string effective_user = 1;</code> */ java.lang.String getEffectiveUser(); /** * <code>required string effective_user = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getEffectiveUserBytes(); /** * <code>optional string real_user = 2;</code> */ boolean hasRealUser(); /** * <code>optional string real_user = 2;</code> */ java.lang.String getRealUser(); /** * <code>optional string real_user = 2;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getRealUserBytes(); } /** * <pre> * User Information proto. Included in ConnectionHeader on connection setup * </pre> * * Protobuf type {@code hbase.pb.UserInformation} */ public static final class UserInformation extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.UserInformation) UserInformationOrBuilder { // Use UserInformation.newBuilder() to construct. 
private UserInformation(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UserInformation() { effectiveUser_ = ""; realUser_ = ""; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private UserInformation( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; effectiveUser_ = bs; break; } case 18: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; realUser_ = bs; break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_UserInformation_descriptor; } protected 
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_UserInformation_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder.class); } private int bitField0_; public static final int EFFECTIVE_USER_FIELD_NUMBER = 1; private volatile java.lang.Object effectiveUser_; /** * <code>required string effective_user = 1;</code> */ public boolean hasEffectiveUser() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string effective_user = 1;</code> */ public java.lang.String getEffectiveUser() { java.lang.Object ref = effectiveUser_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { effectiveUser_ = s; } return s; } } /** * <code>required string effective_user = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getEffectiveUserBytes() { java.lang.Object ref = effectiveUser_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); effectiveUser_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int REAL_USER_FIELD_NUMBER = 2; private volatile java.lang.Object realUser_; /** * <code>optional string real_user = 2;</code> */ public boolean hasRealUser() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional string real_user = 2;</code> */ 
public java.lang.String getRealUser() { java.lang.Object ref = realUser_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { realUser_ = s; } return s; } } /** * <code>optional string real_user = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getRealUserBytes() { java.lang.Object ref = realUser_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); realUser_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasEffectiveUser()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, effectiveUser_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 2, realUser_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(1, effectiveUser_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += 
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(2, realUser_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation other = (org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation) obj; boolean result = true; result = result && (hasEffectiveUser() == other.hasEffectiveUser()); if (hasEffectiveUser()) { result = result && getEffectiveUser() .equals(other.getEffectiveUser()); } result = result && (hasRealUser() == other.hasRealUser()); if (hasRealUser()) { result = result && getRealUser() .equals(other.getRealUser()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasEffectiveUser()) { hash = (37 * hash) + EFFECTIVE_USER_FIELD_NUMBER; hash = (53 * hash) + getEffectiveUser().hashCode(); } if (hasRealUser()) { hash = (37 * hash) + REAL_USER_FIELD_NUMBER; hash = (53 * hash) + getRealUser().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation parseFrom( 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation parseDelimitedFrom( java.io.InputStream input, 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * User Information proto. 
Included in ConnectionHeader on connection setup * </pre> * * Protobuf type {@code hbase.pb.UserInformation} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.UserInformation) org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_UserInformation_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_UserInformation_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); effectiveUser_ = ""; bitField0_ = (bitField0_ & ~0x00000001); realUser_ = ""; bitField0_ = (bitField0_ & ~0x00000002); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_UserInformation_descriptor; 
} public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation build() { org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation result = new org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.effectiveUser_ = effectiveUser_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.realUser_ = realUser_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance()) return this; if (other.hasEffectiveUser()) { bitField0_ |= 0x00000001; effectiveUser_ = other.effectiveUser_; onChanged(); } if (other.hasRealUser()) { bitField0_ |= 0x00000002; realUser_ = other.realUser_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasEffectiveUser()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object effectiveUser_ = ""; /** * <code>required string effective_user = 1;</code> */ public boolean 
hasEffectiveUser() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string effective_user = 1;</code> */ public java.lang.String getEffectiveUser() { java.lang.Object ref = effectiveUser_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { effectiveUser_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>required string effective_user = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getEffectiveUserBytes() { java.lang.Object ref = effectiveUser_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); effectiveUser_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <code>required string effective_user = 1;</code> */ public Builder setEffectiveUser( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; effectiveUser_ = value; onChanged(); return this; } /** * <code>required string effective_user = 1;</code> */ public Builder clearEffectiveUser() { bitField0_ = (bitField0_ & ~0x00000001); effectiveUser_ = getDefaultInstance().getEffectiveUser(); onChanged(); return this; } /** * <code>required string effective_user = 1;</code> */ public Builder setEffectiveUserBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; effectiveUser_ = value; onChanged(); return this; } private java.lang.Object realUser_ = ""; /** * <code>optional string real_user = 2;</code> */ public boolean hasRealUser() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * 
<code>optional string real_user = 2;</code> */ public java.lang.String getRealUser() { java.lang.Object ref = realUser_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { realUser_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>optional string real_user = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getRealUserBytes() { java.lang.Object ref = realUser_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); realUser_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <code>optional string real_user = 2;</code> */ public Builder setRealUser( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; realUser_ = value; onChanged(); return this; } /** * <code>optional string real_user = 2;</code> */ public Builder clearRealUser() { bitField0_ = (bitField0_ & ~0x00000002); realUser_ = getDefaultInstance().getRealUser(); onChanged(); return this; } /** * <code>optional string real_user = 2;</code> */ public Builder setRealUserBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; realUser_ = value; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return 
super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.UserInformation) } // @@protoc_insertion_point(class_scope:hbase.pb.UserInformation) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<UserInformation> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<UserInformation>() { public UserInformation parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new UserInformation(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<UserInformation> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<UserInformation> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ConnectionHeaderOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.ConnectionHeader) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>optional .hbase.pb.UserInformation user_info = 1;</code> */ boolean hasUserInfo(); /** * <code>optional .hbase.pb.UserInformation user_info = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation getUserInfo(); /** * <code>optional 
.hbase.pb.UserInformation user_info = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder(); /** * <code>optional string service_name = 2;</code> */ boolean hasServiceName(); /** * <code>optional string service_name = 2;</code> */ java.lang.String getServiceName(); /** * <code>optional string service_name = 2;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getServiceNameBytes(); /** * <pre> * Cell block codec we will use sending over optional cell blocks. Server throws exception * if cannot deal. Null means no codec'ing going on so we are pb all the time (SLOW!!!) * </pre> * * <code>optional string cell_block_codec_class = 3;</code> */ boolean hasCellBlockCodecClass(); /** * <pre> * Cell block codec we will use sending over optional cell blocks. Server throws exception * if cannot deal. Null means no codec'ing going on so we are pb all the time (SLOW!!!) * </pre> * * <code>optional string cell_block_codec_class = 3;</code> */ java.lang.String getCellBlockCodecClass(); /** * <pre> * Cell block codec we will use sending over optional cell blocks. Server throws exception * if cannot deal. Null means no codec'ing going on so we are pb all the time (SLOW!!!) * </pre> * * <code>optional string cell_block_codec_class = 3;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getCellBlockCodecClassBytes(); /** * <pre> * Compressor we will use if cell block is compressed. Server will throw exception if not supported. * Class must implement hadoop's CompressionCodec Interface. Can't compress if no codec. * </pre> * * <code>optional string cell_block_compressor_class = 4;</code> */ boolean hasCellBlockCompressorClass(); /** * <pre> * Compressor we will use if cell block is compressed. Server will throw exception if not supported. * Class must implement hadoop's CompressionCodec Interface. Can't compress if no codec. 
* </pre> * * <code>optional string cell_block_compressor_class = 4;</code> */ java.lang.String getCellBlockCompressorClass(); /** * <pre> * Compressor we will use if cell block is compressed. Server will throw exception if not supported. * Class must implement hadoop's CompressionCodec Interface. Can't compress if no codec. * </pre> * * <code>optional string cell_block_compressor_class = 4;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getCellBlockCompressorClassBytes(); /** * <code>optional .hbase.pb.VersionInfo version_info = 5;</code> */ boolean hasVersionInfo(); /** * <code>optional .hbase.pb.VersionInfo version_info = 5;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo getVersionInfo(); /** * <code>optional .hbase.pb.VersionInfo version_info = 5;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfoOrBuilder getVersionInfoOrBuilder(); /** * <pre> * the transformation for rpc AES encryption with Apache Commons Crypto * </pre> * * <code>optional string rpc_crypto_cipher_transformation = 6;</code> */ boolean hasRpcCryptoCipherTransformation(); /** * <pre> * the transformation for rpc AES encryption with Apache Commons Crypto * </pre> * * <code>optional string rpc_crypto_cipher_transformation = 6;</code> */ java.lang.String getRpcCryptoCipherTransformation(); /** * <pre> * the transformation for rpc AES encryption with Apache Commons Crypto * </pre> * * <code>optional string rpc_crypto_cipher_transformation = 6;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getRpcCryptoCipherTransformationBytes(); } /** * <pre> * This is sent on connection setup after the connection preamble is sent. 
* </pre> * * Protobuf type {@code hbase.pb.ConnectionHeader} */ public static final class ConnectionHeader extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.ConnectionHeader) ConnectionHeaderOrBuilder { // Use ConnectionHeader.newBuilder() to construct. private ConnectionHeader(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ConnectionHeader() { serviceName_ = ""; cellBlockCodecClass_ = ""; cellBlockCompressorClass_ = ""; rpcCryptoCipherTransformation_ = ""; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ConnectionHeader( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = userInfo_.toBuilder(); } userInfo_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(userInfo_); userInfo_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 18: { 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; serviceName_ = bs; break; } case 26: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000004; cellBlockCodecClass_ = bs; break; } case 34: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000008; cellBlockCompressorClass_ = bs; break; } case 42: { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.Builder subBuilder = null; if (((bitField0_ & 0x00000010) == 0x00000010)) { subBuilder = versionInfo_.toBuilder(); } versionInfo_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(versionInfo_); versionInfo_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000010; break; } case 50: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000020; rpcCryptoCipherTransformation_ = bs; break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_ConnectionHeader_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_ConnectionHeader_fieldAccessorTable 
.ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader.Builder.class); } private int bitField0_; public static final int USER_INFO_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_; /** * <code>optional .hbase.pb.UserInformation user_info = 1;</code> */ public boolean hasUserInfo() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional .hbase.pb.UserInformation user_info = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation getUserInfo() { return userInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } /** * <code>optional .hbase.pb.UserInformation user_info = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder() { return userInfo_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } public static final int SERVICE_NAME_FIELD_NUMBER = 2; private volatile java.lang.Object serviceName_; /** * <code>optional string service_name = 2;</code> */ public boolean hasServiceName() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional string service_name = 2;</code> */ public java.lang.String getServiceName() { java.lang.Object ref = serviceName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { serviceName_ = s; } return s; } } /** * <code>optional string service_name = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getServiceNameBytes() { java.lang.Object ref = serviceName_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); serviceName_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int CELL_BLOCK_CODEC_CLASS_FIELD_NUMBER = 3; private volatile java.lang.Object cellBlockCodecClass_; /** * <pre> * Cell block codec we will use sending over optional cell blocks. Server throws exception * if cannot deal. Null means no codec'ing going on so we are pb all the time (SLOW!!!) * </pre> * * <code>optional string cell_block_codec_class = 3;</code> */ public boolean hasCellBlockCodecClass() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <pre> * Cell block codec we will use sending over optional cell blocks. Server throws exception * if cannot deal. Null means no codec'ing going on so we are pb all the time (SLOW!!!) 
* </pre> * * <code>optional string cell_block_codec_class = 3;</code> */ public java.lang.String getCellBlockCodecClass() { java.lang.Object ref = cellBlockCodecClass_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { cellBlockCodecClass_ = s; } return s; } } /** * <pre> * Cell block codec we will use sending over optional cell blocks. Server throws exception * if cannot deal. Null means no codec'ing going on so we are pb all the time (SLOW!!!) * </pre> * * <code>optional string cell_block_codec_class = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getCellBlockCodecClassBytes() { java.lang.Object ref = cellBlockCodecClass_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); cellBlockCodecClass_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int CELL_BLOCK_COMPRESSOR_CLASS_FIELD_NUMBER = 4; private volatile java.lang.Object cellBlockCompressorClass_; /** * <pre> * Compressor we will use if cell block is compressed. Server will throw exception if not supported. * Class must implement hadoop's CompressionCodec Interface. Can't compress if no codec. * </pre> * * <code>optional string cell_block_compressor_class = 4;</code> */ public boolean hasCellBlockCompressorClass() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <pre> * Compressor we will use if cell block is compressed. Server will throw exception if not supported. * Class must implement hadoop's CompressionCodec Interface. Can't compress if no codec. 
* </pre> * * <code>optional string cell_block_compressor_class = 4;</code> */ public java.lang.String getCellBlockCompressorClass() { java.lang.Object ref = cellBlockCompressorClass_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { cellBlockCompressorClass_ = s; } return s; } } /** * <pre> * Compressor we will use if cell block is compressed. Server will throw exception if not supported. * Class must implement hadoop's CompressionCodec Interface. Can't compress if no codec. * </pre> * * <code>optional string cell_block_compressor_class = 4;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getCellBlockCompressorClassBytes() { java.lang.Object ref = cellBlockCompressorClass_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); cellBlockCompressorClass_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int VERSION_INFO_FIELD_NUMBER = 5; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo versionInfo_; /** * <code>optional .hbase.pb.VersionInfo version_info = 5;</code> */ public boolean hasVersionInfo() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>optional .hbase.pb.VersionInfo version_info = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo getVersionInfo() { return versionInfo_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance() : versionInfo_; } /** * <code>optional .hbase.pb.VersionInfo version_info = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfoOrBuilder getVersionInfoOrBuilder() { return versionInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance() : versionInfo_; } public static final int RPC_CRYPTO_CIPHER_TRANSFORMATION_FIELD_NUMBER = 6; private volatile java.lang.Object rpcCryptoCipherTransformation_; /** * <pre> * the transformation for rpc AES encryption with Apache Commons Crypto * </pre> * * <code>optional string rpc_crypto_cipher_transformation = 6;</code> */ public boolean hasRpcCryptoCipherTransformation() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <pre> * the transformation for rpc AES encryption with Apache Commons Crypto * </pre> * * <code>optional string rpc_crypto_cipher_transformation = 6;</code> */ public java.lang.String getRpcCryptoCipherTransformation() { java.lang.Object ref = rpcCryptoCipherTransformation_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { rpcCryptoCipherTransformation_ = s; } return s; } } /** * <pre> * the transformation for rpc AES encryption with Apache Commons Crypto * </pre> * * <code>optional string rpc_crypto_cipher_transformation = 6;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getRpcCryptoCipherTransformationBytes() { java.lang.Object ref = rpcCryptoCipherTransformation_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( 
(java.lang.String) ref);
      rpcCryptoCipherTransformation_ = b;
      return b;
    } else {
      // Still a ByteString from parsing; return it without conversion.
      return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
    }
  }

  // Memoized result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  // ConnectionHeader declares no required fields of its own, so it is
  // initialized unless a present user_info or version_info sub-message
  // is itself uninitialized.
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    if (hasUserInfo()) {
      if (!getUserInfo().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
    }
    if (hasVersionInfo()) {
      if (!getVersionInfo().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
    }
    memoizedIsInitialized = 1;
    return true;
  }

  // Writes each field that is present (per its bitField0_ presence bit) in
  // ascending field-number order, then any preserved unknown fields.
  public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      output.writeMessage(1, getUserInfo());
    }
    if (((bitField0_ & 0x00000002) == 0x00000002)) {
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 2, serviceName_);
    }
    if (((bitField0_ & 0x00000004) == 0x00000004)) {
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 3, cellBlockCodecClass_);
    }
    if (((bitField0_ & 0x00000008) == 0x00000008)) {
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 4, cellBlockCompressorClass_);
    }
    if (((bitField0_ & 0x00000010) == 0x00000010)) {
      output.writeMessage(5, getVersionInfo());
    }
    if (((bitField0_ & 0x00000020) == 0x00000020)) {
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 6, rpcCryptoCipherTransformation_);
    }
    unknownFields.writeTo(output);
  }

  // Byte size of the serialized form; computed once and memoized.
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
        .computeMessageSize(1, getUserInfo());
    }
    if (((bitField0_ & 0x00000002) == 0x00000002)) {
      size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(2, serviceName_);
    }
    if (((bitField0_ & 0x00000004) == 0x00000004)) {
      size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(3, cellBlockCodecClass_);
    }
    if (((bitField0_ & 0x00000008) == 0x00000008)) {
      size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(4, cellBlockCompressorClass_);
    }
    if (((bitField0_ & 0x00000010) == 0x00000010)) {
      size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
        .computeMessageSize(5, getVersionInfo());
    }
    if (((bitField0_ & 0x00000020) == 0x00000020)) {
      size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(6, rpcCryptoCipherTransformation_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  private static final long serialVersionUID = 0L;

  // Structural equality: matching presence bits, equal values for every
  // present field, and equal unknown-field sets.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader)) {
      return super.equals(obj);
    }
    org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader other =
        (org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader) obj;
    boolean result = true;
    result = result && (hasUserInfo() == other.hasUserInfo());
    if (hasUserInfo()) {
      result = result && getUserInfo()
          .equals(other.getUserInfo());
    }
    result = result && (hasServiceName() == other.hasServiceName());
    if (hasServiceName()) {
      result = result && getServiceName()
          .equals(other.getServiceName());
    }
    result = result && (hasCellBlockCodecClass() == other.hasCellBlockCodecClass());
    if (hasCellBlockCodecClass()) {
      result = result && getCellBlockCodecClass()
          .equals(other.getCellBlockCodecClass());
    }
    result = result && (hasCellBlockCompressorClass() == other.hasCellBlockCompressorClass());
    if (hasCellBlockCompressorClass()) {
      result = result && getCellBlockCompressorClass()
          .equals(other.getCellBlockCompressorClass());
    }
    result = result && (hasVersionInfo() == other.hasVersionInfo());
    if (hasVersionInfo()) {
      result = result && getVersionInfo()
          .equals(other.getVersionInfo());
    }
    result = result && (hasRpcCryptoCipherTransformation() == other.hasRpcCryptoCipherTransformation());
    if (hasRpcCryptoCipherTransformation()) {
      result = result && getRpcCryptoCipherTransformation()
          .equals(other.getRpcCryptoCipherTransformation());
    }
    result = result && unknownFields.equals(other.unknownFields);
    return result;
  }

  // Hash over the descriptor, each present field (tagged with its field
  // number) and the unknown fields; memoized after the first call.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasUserInfo()) {
      hash = (37 * hash) + USER_INFO_FIELD_NUMBER;
      hash = (53 * hash) + getUserInfo().hashCode();
    }
    if (hasServiceName()) {
      hash = (37 * hash) + SERVICE_NAME_FIELD_NUMBER;
      hash = (53 * hash) + getServiceName().hashCode();
    }
    if (hasCellBlockCodecClass()) {
      hash = (37 * hash) + CELL_BLOCK_CODEC_CLASS_FIELD_NUMBER;
      hash = (53 * hash) + getCellBlockCodecClass().hashCode();
    }
    if (hasCellBlockCompressorClass()) {
      hash = (37 * hash) + CELL_BLOCK_COMPRESSOR_CLASS_FIELD_NUMBER;
      hash = (53 * hash) + getCellBlockCompressorClass().hashCode();
    }
    if (hasVersionInfo()) {
      hash = (37 * hash) + VERSION_INFO_FIELD_NUMBER;
      hash = (53 * hash) + getVersionInfo().hashCode();
    }
    if (hasRpcCryptoCipherTransformation()) {
      hash = (37 * hash) + RPC_CRYPTO_CIPHER_TRANSFORMATION_FIELD_NUMBER;
      hash = (53 * hash) + getRpcCryptoCipherTransformation().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // Parse a ConnectionHeader from a serialized ByteString.
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader parseFrom(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public
static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader parseFrom(
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
// Parse from a raw byte array.
public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader parseFrom(byte[] data)
    throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader parseFrom(
    byte[] data,
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
// Parse from a stream; wire-format errors surface as IOException subtypes.
public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader parseFrom(
    java.io.InputStream input,
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited form: message is preceded by a varint length prefix.
public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input);
}
public static
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader parseDelimitedFrom(
    java.io.InputStream input,
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader parseFrom(
    org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader parseFrom(
    org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}

// Builder factory methods.
public Builder newBuilderForType() {
  return newBuilder();
}
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
// New builder pre-populated from an existing message.
public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
  // The default instance yields a fresh builder; otherwise seed from this.
  return this == DEFAULT_INSTANCE
      ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 * <pre>
 * This is sent on connection setup after the connection preamble is sent.
* </pre> * * Protobuf type {@code hbase.pb.ConnectionHeader} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.ConnectionHeader) org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_ConnectionHeader_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_ConnectionHeader_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getUserInfoFieldBuilder(); getVersionInfoFieldBuilder(); } } public Builder clear() { super.clear(); if (userInfoBuilder_ == null) { userInfo_ = null; } else { userInfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); serviceName_ = ""; bitField0_ = (bitField0_ & ~0x00000002); cellBlockCodecClass_ = ""; bitField0_ = (bitField0_ & ~0x00000004); cellBlockCompressorClass_ = ""; bitField0_ = (bitField0_ & ~0x00000008); if 
(versionInfoBuilder_ == null) { versionInfo_ = null; } else { versionInfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000010); rpcCryptoCipherTransformation_ = ""; bitField0_ = (bitField0_ & ~0x00000020); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_ConnectionHeader_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader build() { org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader result = new org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (userInfoBuilder_ == null) { result.userInfo_ = userInfo_; } else { result.userInfo_ = userInfoBuilder_.build(); } if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.serviceName_ = serviceName_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.cellBlockCodecClass_ = cellBlockCodecClass_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; } result.cellBlockCompressorClass_ = cellBlockCompressorClass_; if (((from_bitField0_ & 0x00000010) == 0x00000010)) { to_bitField0_ |= 0x00000010; } if 
(versionInfoBuilder_ == null) { result.versionInfo_ = versionInfo_; } else { result.versionInfo_ = versionInfoBuilder_.build(); } if (((from_bitField0_ & 0x00000020) == 0x00000020)) { to_bitField0_ |= 0x00000020; } result.rpcCryptoCipherTransformation_ = rpcCryptoCipherTransformation_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader.getDefaultInstance()) return this; if (other.hasUserInfo()) { mergeUserInfo(other.getUserInfo()); } if (other.hasServiceName()) { bitField0_ |= 
0x00000002; serviceName_ = other.serviceName_; onChanged(); } if (other.hasCellBlockCodecClass()) { bitField0_ |= 0x00000004; cellBlockCodecClass_ = other.cellBlockCodecClass_; onChanged(); } if (other.hasCellBlockCompressorClass()) { bitField0_ |= 0x00000008; cellBlockCompressorClass_ = other.cellBlockCompressorClass_; onChanged(); } if (other.hasVersionInfo()) { mergeVersionInfo(other.getVersionInfo()); } if (other.hasRpcCryptoCipherTransformation()) { bitField0_ |= 0x00000020; rpcCryptoCipherTransformation_ = other.rpcCryptoCipherTransformation_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (hasUserInfo()) { if (!getUserInfo().isInitialized()) { return false; } } if (hasVersionInfo()) { if (!getVersionInfo().isInitialized()) { return false; } } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder> userInfoBuilder_; /** * <code>optional .hbase.pb.UserInformation user_info = 1;</code> */ public boolean hasUserInfo() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional .hbase.pb.UserInformation user_info = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation getUserInfo() { if (userInfoBuilder_ == null) { return userInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } else { return userInfoBuilder_.getMessage(); } } /** * <code>optional .hbase.pb.UserInformation user_info = 1;</code> */ public Builder setUserInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation value) { if (userInfoBuilder_ == null) { if (value == null) { throw new NullPointerException(); } userInfo_ = value; onChanged(); } else { userInfoBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>optional .hbase.pb.UserInformation user_info = 1;</code> */ public Builder setUserInfo( org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder builderForValue) { if (userInfoBuilder_ == null) { userInfo_ = builderForValue.build(); onChanged(); } else { userInfoBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>optional .hbase.pb.UserInformation user_info = 1;</code> */ public Builder mergeUserInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation value) { if (userInfoBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && userInfo_ != null && userInfo_ != org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance()) { userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.newBuilder(userInfo_).mergeFrom(value).buildPartial(); } else { userInfo_ = 
value; } onChanged(); } else { userInfoBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>optional .hbase.pb.UserInformation user_info = 1;</code> */ public Builder clearUserInfo() { if (userInfoBuilder_ == null) { userInfo_ = null; onChanged(); } else { userInfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>optional .hbase.pb.UserInformation user_info = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder getUserInfoBuilder() { bitField0_ |= 0x00000001; onChanged(); return getUserInfoFieldBuilder().getBuilder(); } /** * <code>optional .hbase.pb.UserInformation user_info = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder() { if (userInfoBuilder_ != null) { return userInfoBuilder_.getMessageOrBuilder(); } else { return userInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance() : userInfo_; } } /** * <code>optional .hbase.pb.UserInformation user_info = 1;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder> getUserInfoFieldBuilder() { if (userInfoBuilder_ == null) { userInfoBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder>( getUserInfo(), getParentForChildren(), isClean()); userInfo_ = null; } return userInfoBuilder_; } private java.lang.Object 
serviceName_ = ""; /** * <code>optional string service_name = 2;</code> */ public boolean hasServiceName() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional string service_name = 2;</code> */ public java.lang.String getServiceName() { java.lang.Object ref = serviceName_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { serviceName_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>optional string service_name = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getServiceNameBytes() { java.lang.Object ref = serviceName_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); serviceName_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <code>optional string service_name = 2;</code> */ public Builder setServiceName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; serviceName_ = value; onChanged(); return this; } /** * <code>optional string service_name = 2;</code> */ public Builder clearServiceName() { bitField0_ = (bitField0_ & ~0x00000002); serviceName_ = getDefaultInstance().getServiceName(); onChanged(); return this; } /** * <code>optional string service_name = 2;</code> */ public Builder setServiceNameBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; serviceName_ = value; onChanged(); return this; } private java.lang.Object cellBlockCodecClass_ = ""; /** * <pre> * Cell block codec we will use sending over optional cell blocks. 
Server throws exception * if cannot deal. Null means no codec'ing going on so we are pb all the time (SLOW!!!) * </pre> * * <code>optional string cell_block_codec_class = 3;</code> */ public boolean hasCellBlockCodecClass() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <pre> * Cell block codec we will use sending over optional cell blocks. Server throws exception * if cannot deal. Null means no codec'ing going on so we are pb all the time (SLOW!!!) * </pre> * * <code>optional string cell_block_codec_class = 3;</code> */ public java.lang.String getCellBlockCodecClass() { java.lang.Object ref = cellBlockCodecClass_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { cellBlockCodecClass_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> * Cell block codec we will use sending over optional cell blocks. Server throws exception * if cannot deal. Null means no codec'ing going on so we are pb all the time (SLOW!!!) * </pre> * * <code>optional string cell_block_codec_class = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getCellBlockCodecClassBytes() { java.lang.Object ref = cellBlockCodecClass_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); cellBlockCodecClass_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <pre> * Cell block codec we will use sending over optional cell blocks. Server throws exception * if cannot deal. Null means no codec'ing going on so we are pb all the time (SLOW!!!) 
* </pre> * * <code>optional string cell_block_codec_class = 3;</code> */ public Builder setCellBlockCodecClass( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; cellBlockCodecClass_ = value; onChanged(); return this; } /** * <pre> * Cell block codec we will use sending over optional cell blocks. Server throws exception * if cannot deal. Null means no codec'ing going on so we are pb all the time (SLOW!!!) * </pre> * * <code>optional string cell_block_codec_class = 3;</code> */ public Builder clearCellBlockCodecClass() { bitField0_ = (bitField0_ & ~0x00000004); cellBlockCodecClass_ = getDefaultInstance().getCellBlockCodecClass(); onChanged(); return this; } /** * <pre> * Cell block codec we will use sending over optional cell blocks. Server throws exception * if cannot deal. Null means no codec'ing going on so we are pb all the time (SLOW!!!) * </pre> * * <code>optional string cell_block_codec_class = 3;</code> */ public Builder setCellBlockCodecClassBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; cellBlockCodecClass_ = value; onChanged(); return this; } private java.lang.Object cellBlockCompressorClass_ = ""; /** * <pre> * Compressor we will use if cell block is compressed. Server will throw exception if not supported. * Class must implement hadoop's CompressionCodec Interface. Can't compress if no codec. * </pre> * * <code>optional string cell_block_compressor_class = 4;</code> */ public boolean hasCellBlockCompressorClass() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <pre> * Compressor we will use if cell block is compressed. Server will throw exception if not supported. * Class must implement hadoop's CompressionCodec Interface. Can't compress if no codec. 
* </pre> * * <code>optional string cell_block_compressor_class = 4;</code> */ public java.lang.String getCellBlockCompressorClass() { java.lang.Object ref = cellBlockCompressorClass_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { cellBlockCompressorClass_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> * Compressor we will use if cell block is compressed. Server will throw exception if not supported. * Class must implement hadoop's CompressionCodec Interface. Can't compress if no codec. * </pre> * * <code>optional string cell_block_compressor_class = 4;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getCellBlockCompressorClassBytes() { java.lang.Object ref = cellBlockCompressorClass_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); cellBlockCompressorClass_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <pre> * Compressor we will use if cell block is compressed. Server will throw exception if not supported. * Class must implement hadoop's CompressionCodec Interface. Can't compress if no codec. * </pre> * * <code>optional string cell_block_compressor_class = 4;</code> */ public Builder setCellBlockCompressorClass( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000008; cellBlockCompressorClass_ = value; onChanged(); return this; } /** * <pre> * Compressor we will use if cell block is compressed. Server will throw exception if not supported. * Class must implement hadoop's CompressionCodec Interface. Can't compress if no codec. 
* </pre> * * <code>optional string cell_block_compressor_class = 4;</code> */ public Builder clearCellBlockCompressorClass() { bitField0_ = (bitField0_ & ~0x00000008); cellBlockCompressorClass_ = getDefaultInstance().getCellBlockCompressorClass(); onChanged(); return this; } /** * <pre> * Compressor we will use if cell block is compressed. Server will throw exception if not supported. * Class must implement hadoop's CompressionCodec Interface. Can't compress if no codec. * </pre> * * <code>optional string cell_block_compressor_class = 4;</code> */ public Builder setCellBlockCompressorClassBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000008; cellBlockCompressorClass_ = value; onChanged(); return this; } private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo versionInfo_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfoOrBuilder> versionInfoBuilder_; /** * <code>optional .hbase.pb.VersionInfo version_info = 5;</code> */ public boolean hasVersionInfo() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>optional .hbase.pb.VersionInfo version_info = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo getVersionInfo() { if (versionInfoBuilder_ == null) { return versionInfo_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance() : versionInfo_; } else { return versionInfoBuilder_.getMessage(); } } /** * <code>optional .hbase.pb.VersionInfo version_info = 5;</code> */ public Builder setVersionInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo value) { if (versionInfoBuilder_ == null) { if (value == null) { throw new NullPointerException(); } versionInfo_ = value; onChanged(); } else { versionInfoBuilder_.setMessage(value); } bitField0_ |= 0x00000010; return this; } /** * <code>optional .hbase.pb.VersionInfo version_info = 5;</code> */ public Builder setVersionInfo( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.Builder builderForValue) { if (versionInfoBuilder_ == null) { versionInfo_ = builderForValue.build(); onChanged(); } else { versionInfoBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000010; return this; } /** * <code>optional .hbase.pb.VersionInfo version_info = 5;</code> */ public Builder mergeVersionInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo value) { if (versionInfoBuilder_ == null) { if (((bitField0_ & 0x00000010) == 0x00000010) && versionInfo_ != null && versionInfo_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance()) { versionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.newBuilder(versionInfo_).mergeFrom(value).buildPartial(); } else { versionInfo_ = value; } onChanged(); } else { versionInfoBuilder_.mergeFrom(value); } bitField0_ |= 0x00000010; return this; } /** * <code>optional .hbase.pb.VersionInfo version_info = 5;</code> */ public Builder clearVersionInfo() { if (versionInfoBuilder_ == null) { versionInfo_ = null; onChanged(); } else { versionInfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000010); return this; } /** * <code>optional .hbase.pb.VersionInfo version_info = 5;</code> 
*/ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.Builder getVersionInfoBuilder() { bitField0_ |= 0x00000010; onChanged(); return getVersionInfoFieldBuilder().getBuilder(); } /** * <code>optional .hbase.pb.VersionInfo version_info = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfoOrBuilder getVersionInfoOrBuilder() { if (versionInfoBuilder_ != null) { return versionInfoBuilder_.getMessageOrBuilder(); } else { return versionInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance() : versionInfo_; } } /** * <code>optional .hbase.pb.VersionInfo version_info = 5;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfoOrBuilder> getVersionInfoFieldBuilder() { if (versionInfoBuilder_ == null) { versionInfoBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfoOrBuilder>( getVersionInfo(), getParentForChildren(), isClean()); versionInfo_ = null; } return versionInfoBuilder_; } private java.lang.Object rpcCryptoCipherTransformation_ = ""; /** * <pre> * the transformation for rpc AES encryption with Apache Commons Crypto * </pre> * * <code>optional string rpc_crypto_cipher_transformation = 6;</code> */ public boolean hasRpcCryptoCipherTransformation() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <pre> * the transformation for rpc AES encryption with Apache Commons Crypto * </pre> * * <code>optional string 
rpc_crypto_cipher_transformation = 6;</code> */ public java.lang.String getRpcCryptoCipherTransformation() { java.lang.Object ref = rpcCryptoCipherTransformation_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { rpcCryptoCipherTransformation_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> * the transformation for rpc AES encryption with Apache Commons Crypto * </pre> * * <code>optional string rpc_crypto_cipher_transformation = 6;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getRpcCryptoCipherTransformationBytes() { java.lang.Object ref = rpcCryptoCipherTransformation_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); rpcCryptoCipherTransformation_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <pre> * the transformation for rpc AES encryption with Apache Commons Crypto * </pre> * * <code>optional string rpc_crypto_cipher_transformation = 6;</code> */ public Builder setRpcCryptoCipherTransformation( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000020; rpcCryptoCipherTransformation_ = value; onChanged(); return this; } /** * <pre> * the transformation for rpc AES encryption with Apache Commons Crypto * </pre> * * <code>optional string rpc_crypto_cipher_transformation = 6;</code> */ public Builder clearRpcCryptoCipherTransformation() { bitField0_ = (bitField0_ & ~0x00000020); rpcCryptoCipherTransformation_ = getDefaultInstance().getRpcCryptoCipherTransformation(); onChanged(); return this; } /** * <pre> * the transformation for rpc AES encryption with Apache 
Commons Crypto * </pre> * * <code>optional string rpc_crypto_cipher_transformation = 6;</code> */ public Builder setRpcCryptoCipherTransformationBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000020; rpcCryptoCipherTransformation_ = value; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.ConnectionHeader) } // @@protoc_insertion_point(class_scope:hbase.pb.ConnectionHeader) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ConnectionHeader> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<ConnectionHeader>() { public ConnectionHeader parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new ConnectionHeader(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ConnectionHeader> parser() { return PARSER; } @java.lang.Override public 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ConnectionHeader> getParserForType() {
  return PARSER;
}

public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}

// end of generated message class ConnectionHeader
}

/**
 * Read-only view of a {@code hbase.pb.ConnectionHeaderResponse}, implemented by
 * both the immutable message and its Builder.
 *
 * <p>NOTE(review): this file is protoc-generated ("DO NOT EDIT" header); any
 * change to fields or comments should be made in RPC.proto and regenerated.
 */
public interface ConnectionHeaderResponseOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hbase.pb.ConnectionHeaderResponse)
    org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {

  /**
   * <pre>
   * To use Apache Commons Crypto, negotiate the metadata
   * </pre>
   *
   * <code>optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1;</code>
   */
  boolean hasCryptoCipherMeta();

  /**
   * <pre>
   * To use Apache Commons Crypto, negotiate the metadata
   * </pre>
   *
   * <code>optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta getCryptoCipherMeta();

  /**
   * <pre>
   * To use Apache Commons Crypto, negotiate the metadata
   * </pre>
   *
   * <code>optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMetaOrBuilder getCryptoCipherMetaOrBuilder();
}

/**
 * <pre>
 * This is sent by rpc server to negotiate the data if necessary
 * </pre>
 *
 * Protobuf type {@code hbase.pb.ConnectionHeaderResponse}
 */
public static final class ConnectionHeaderResponse extends
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:hbase.pb.ConnectionHeaderResponse)
    ConnectionHeaderResponseOrBuilder {
  // Use ConnectionHeaderResponse.newBuilder() to construct.
// NOTE(review): protoc-generated code ("DO NOT EDIT" header) — behavioral fixes
// belong in RPC.proto followed by regeneration, not in this file.

// Builder-based constructor; field state is installed by the generated Builder.
private ConnectionHeaderResponse(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// Default-instance constructor: this message has no fields needing eager
// initialization (the single message field defaults to null).
private ConnectionHeaderResponse() {
}

@java.lang.Override
public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() {
  return this.unknownFields;
}

// Wire-format parsing constructor, invoked via PARSER.parsePartialFrom().
// Reads tag/value pairs until end of stream (tag 0); tags this message does not
// recognize are preserved in unknownFields rather than dropped.
private ConnectionHeaderResponse(
    org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
  this();
  int mutable_bitField0_ = 0; // generated bookkeeping; appears unused in this block
  org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      // Cases are matched by tag value, so the generated "default before case 10"
      // ordering has no effect on behavior.
      switch (tag) {
        case 0: // end of input
          done = true;
          break;
        default: {
          if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
        case 10: { // field 1 (crypto_cipher_meta), wire type 2 (length-delimited)
          org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta.Builder subBuilder = null;
          if (((bitField0_ & 0x00000001) == 0x00000001)) {
            // Field already seen: merge the new occurrence into the existing
            // value instead of replacing it outright.
            subBuilder = cryptoCipherMeta_.toBuilder();
          }
          cryptoCipherMeta_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta.PARSER, extensionRegistry);
          if (subBuilder != null) {
            subBuilder.mergeFrom(cryptoCipherMeta_);
            cryptoCipherMeta_ = subBuilder.buildPartial();
          }
          bitField0_ |= 0x00000001;
          break;
        }
      }
    }
  } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
    // Attach the partially-parsed message so callers can inspect what was read.
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
        e).setUnfinishedMessage(this);
  } finally {
    // Runs on both success and failure: capture accumulated unknown fields and
    // seal extensions before the object escapes.
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}

public static final
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_ConnectionHeaderResponse_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_ConnectionHeaderResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponse.Builder.class); } private int bitField0_; public static final int CRYPTO_CIPHER_META_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta cryptoCipherMeta_; /** * <pre> * To use Apache Commons Crypto, negotiate the metadata * </pre> * * <code>optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1;</code> */ public boolean hasCryptoCipherMeta() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * To use Apache Commons Crypto, negotiate the metadata * </pre> * * <code>optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta getCryptoCipherMeta() { return cryptoCipherMeta_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta.getDefaultInstance() : cryptoCipherMeta_; } /** * <pre> * To use Apache Commons Crypto, negotiate the metadata * </pre> * * <code>optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMetaOrBuilder getCryptoCipherMetaOrBuilder() { return cryptoCipherMeta_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta.getDefaultInstance() : cryptoCipherMeta_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (hasCryptoCipherMeta()) { if (!getCryptoCipherMeta().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, getCryptoCipherMeta()); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(1, getCryptoCipherMeta()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponse) obj; boolean result = true; result = result && (hasCryptoCipherMeta() == other.hasCryptoCipherMeta()); if (hasCryptoCipherMeta()) { result = result && getCryptoCipherMeta() .equals(other.getCryptoCipherMeta()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + 
getDescriptor().hashCode(); if (hasCryptoCipherMeta()) { hash = (37 * hash) + CRYPTO_CIPHER_META_FIELD_NUMBER; hash = (53 * hash) + getCryptoCipherMeta().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponse parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponse parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponse parseFrom( java.io.InputStream input, 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponse parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponse 
prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * This is sent by rpc server to negotiate the data if necessary * </pre> * * Protobuf type {@code hbase.pb.ConnectionHeaderResponse} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.ConnectionHeaderResponse) org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponseOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_ConnectionHeaderResponse_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_ConnectionHeaderResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if 
(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getCryptoCipherMetaFieldBuilder(); } } public Builder clear() { super.clear(); if (cryptoCipherMetaBuilder_ == null) { cryptoCipherMeta_ = null; } else { cryptoCipherMetaBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_ConnectionHeaderResponse_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponse build() { org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponse buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponse result = new org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (cryptoCipherMetaBuilder_ == null) { result.cryptoCipherMeta_ = cryptoCipherMeta_; } else { result.cryptoCipherMeta_ = cryptoCipherMetaBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) 
super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponse other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponse.getDefaultInstance()) return this; if (other.hasCryptoCipherMeta()) { mergeCryptoCipherMeta(other.getCryptoCipherMeta()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (hasCryptoCipherMeta()) { if (!getCryptoCipherMeta().isInitialized()) { return false; } } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponse parsedMessage = 
null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponse) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta cryptoCipherMeta_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMetaOrBuilder> cryptoCipherMetaBuilder_; /** * <pre> * To use Apache Commons Crypto, negotiate the metadata * </pre> * * <code>optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1;</code> */ public boolean hasCryptoCipherMeta() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * To use Apache Commons Crypto, negotiate the metadata * </pre> * * <code>optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta getCryptoCipherMeta() { if (cryptoCipherMetaBuilder_ == null) { return cryptoCipherMeta_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta.getDefaultInstance() : cryptoCipherMeta_; } else { return cryptoCipherMetaBuilder_.getMessage(); } } /** * <pre> * To use Apache Commons Crypto, negotiate the metadata * </pre> * * <code>optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1;</code> */ public Builder setCryptoCipherMeta(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta value) { if (cryptoCipherMetaBuilder_ == null) { if (value == null) { throw new NullPointerException(); } cryptoCipherMeta_ = value; onChanged(); } else { cryptoCipherMetaBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <pre> * To use Apache Commons Crypto, negotiate the metadata * </pre> * * <code>optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1;</code> */ public Builder setCryptoCipherMeta( org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta.Builder builderForValue) { if (cryptoCipherMetaBuilder_ == null) { cryptoCipherMeta_ = builderForValue.build(); onChanged(); } else { cryptoCipherMetaBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <pre> * To use Apache Commons Crypto, negotiate the metadata * </pre> * * <code>optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1;</code> */ public Builder mergeCryptoCipherMeta(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta value) { if (cryptoCipherMetaBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && cryptoCipherMeta_ != null && cryptoCipherMeta_ != org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta.getDefaultInstance()) { cryptoCipherMeta_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta.newBuilder(cryptoCipherMeta_).mergeFrom(value).buildPartial(); } else { cryptoCipherMeta_ = value; } onChanged(); } else { cryptoCipherMetaBuilder_.mergeFrom(value); } bitField0_ |= 
0x00000001; return this; } /** * <pre> * To use Apache Commons Crypto, negotiate the metadata * </pre> * * <code>optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1;</code> */ public Builder clearCryptoCipherMeta() { if (cryptoCipherMetaBuilder_ == null) { cryptoCipherMeta_ = null; onChanged(); } else { cryptoCipherMetaBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <pre> * To use Apache Commons Crypto, negotiate the metadata * </pre> * * <code>optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta.Builder getCryptoCipherMetaBuilder() { bitField0_ |= 0x00000001; onChanged(); return getCryptoCipherMetaFieldBuilder().getBuilder(); } /** * <pre> * To use Apache Commons Crypto, negotiate the metadata * </pre> * * <code>optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMetaOrBuilder getCryptoCipherMetaOrBuilder() { if (cryptoCipherMetaBuilder_ != null) { return cryptoCipherMetaBuilder_.getMessageOrBuilder(); } else { return cryptoCipherMeta_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta.getDefaultInstance() : cryptoCipherMeta_; } } /** * <pre> * To use Apache Commons Crypto, negotiate the metadata * </pre> * * <code>optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMetaOrBuilder> getCryptoCipherMetaFieldBuilder() { if (cryptoCipherMetaBuilder_ == null) { cryptoCipherMetaBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMetaOrBuilder>( getCryptoCipherMeta(), getParentForChildren(), isClean()); cryptoCipherMeta_ = null; } return cryptoCipherMetaBuilder_; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.ConnectionHeaderResponse) } // @@protoc_insertion_point(class_scope:hbase.pb.ConnectionHeaderResponse) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponse(); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponse getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    // NOTE(review): protoc-generated singleton/parser plumbing for ConnectionHeaderResponse.
    // PARSER is @Deprecated by the generator; external callers should use parser() instead.
    @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ConnectionHeaderResponse>
        PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<ConnectionHeaderResponse>() {
      public ConnectionHeaderResponse parsePartialFrom(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
          org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
          // Delegates to the message's parsing constructor, which tolerates missing
          // required sub-fields (hence "partial").
          return new ConnectionHeaderResponse(input, extensionRegistry);
      }
    };

    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ConnectionHeaderResponse> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ConnectionHeaderResponse> getParserForType() {
      return PARSER;
    }

    public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponse getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  // NOTE(review): generated read-only accessor interface for hbase.pb.CellBlockMeta.
  // The "too hand" typo below originates in RPC.proto; fix it there and regenerate —
  // do not hand-edit this file.
  public interface CellBlockMetaOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hbase.pb.CellBlockMeta)
      org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {

    /**
     * <pre>
     * Length of the following cell block. Could calculate it but convenient having it too hand.
     * </pre>
     *
     * <code>optional uint32 length = 1;</code>
     */
    boolean hasLength();
    /**
     * <pre>
     * Length of the following cell block. Could calculate it but convenient having it too hand.
     * </pre>
     *
     * <code>optional uint32 length = 1;</code>
     */
    int getLength();
  }
  /**
   * <pre>
   * Optional Cell block Message.
Included in client RequestHeader
   * </pre>
   *
   * Protobuf type {@code hbase.pb.CellBlockMeta}
   */
  // NOTE(review): protoc-generated message class — regenerate from RPC.proto rather
  // than hand-editing. Proto2 semantics: explicit has-bits (bitField0_) track field
  // presence independently of field values.
  public static final class CellBlockMeta extends
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hbase.pb.CellBlockMeta)
      CellBlockMetaOrBuilder {
    // Use CellBlockMeta.newBuilder() to construct.
    private CellBlockMeta(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    // No-arg form used only for DEFAULT_INSTANCE: field at proto2 default, has-bit clear.
    private CellBlockMeta() {
      length_ = 0;
    }

    @java.lang.Override
    public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
    getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: consumes tag/value pairs until end of input
    // (readTag() == 0), preserving unrecognized fields so they round-trip on reserialize.
    private CellBlockMeta(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      this();
      int mutable_bitField0_ = 0;
      org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // tag 8 = field 1 (length), wire type 0 (varint)
              bitField0_ |= 0x00000001;
              length_ = input.readUInt32();
              break;
            }
          }
        }
      } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
            e).setUnfinishedMessage(this);
      } finally {
        // Runs even on throw so the partially-parsed message is internally consistent.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_CellBlockMeta_descriptor;
    }

    protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_CellBlockMeta_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.Builder.class);
    }

    private int bitField0_;
    public static final int LENGTH_FIELD_NUMBER = 1;
    private int length_;
    /**
     * <pre>
     * Length of the following cell block. Could calculate it but convenient having it too hand.
     * </pre>
     *
     * <code>optional uint32 length = 1;</code>
     */
    public boolean hasLength() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <pre>
     * Length of the following cell block. Could calculate it but convenient having it too hand.
     * </pre>
     *
     * <code>optional uint32 length = 1;</code>
     */
    public int getLength() {
      return length_;
    }

    // Memoized tri-state: -1 unknown, 0 false, 1 true. Trivially true here — the
    // message has no required fields.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Only fields whose has-bit is set are serialized (proto2 optional semantics).
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt32(1, length_);
      }
      unknownFields.writeTo(output);
    }

    public int getSerializedSize() {
      // memoizedSize caches the computed size; safe because the message is immutable.
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
          .computeUInt32Size(1, length_);
      }
      size += unknownFields.getSerializedSize();
      memoizedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta other = (org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta) obj;

      // Field presence must match, and values only compared when present.
      boolean result = true;
      result = result && (hasLength() == other.hasLength());
      if (hasLength()) {
        result = result && (getLength()
            == other.getLength());
      }
      result = result && unknownFields.equals(other.unknownFields);
      return result;
    }

    @java.lang.Override
    public int hashCode() {
      // memoizedHashCode caches the hash; 0 doubles as the "not computed" sentinel.
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasLength()) {
        hash = (37 * hash) + LENGTH_FIELD_NUMBER;
        hash = (53 * hash) + getLength();
      }
      hash = (29 * hash) + unknownFields.hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta parseFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta parseFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta parseFrom(byte[] data)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta parseFrom(
        byte[] data,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta parseFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta parseFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() {
      // Avoids a needless mergeFrom when called on the (empty) default instance.
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * <pre>
     * Optional Cell block Message. Included in client RequestHeader
     * </pre>
     *
     * Protobuf type {@code hbase.pb.CellBlockMeta}
     */
    public static final class Builder extends
        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hbase.pb.CellBlockMeta)
        org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder {
      public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_CellBlockMeta_descriptor;
      }

      protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_CellBlockMeta_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // Intentionally empty body: CellBlockMeta has no message-typed fields whose
        // nested builders would need eager initialization.
        if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
        }
      }
      public Builder clear() {
        super.clear();
        length_ = 0;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_CellBlockMeta_descriptor;
      }

      public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta build() {
        org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta buildPartial() {
        // Copies the builder's field value and has-bit into a fresh immutable message.
        org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta result = new org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.length_ = length_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder clone() {
        return (Builder) super.clone();
      }
      public Builder setField(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
          Object value) {
        return (Builder) super.setField(field, value);
      }
      public Builder clearField(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
        return (Builder) super.clearField(field);
      }
      public Builder clearOneof(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
        return (Builder) super.clearOneof(oneof);
      }
      public Builder setRepeatedField(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
          int index, Object value) {
        return (Builder) super.setRepeatedField(field, index, value);
      }
      public Builder addRepeatedField(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
          Object value) {
        return (Builder) super.addRepeatedField(field, value);
      }
      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta) {
          return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta other) {
        if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance()) return this;
        if (other.hasLength()) {
          setLength(other.getLength());
        }
        this.mergeUnknownFields(other.unknownFields);
        onChanged();
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
          org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
          // Keep whatever parsed before the failure so the merge is not lost (finally block).
          parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta) e.getUnfinishedMessage();
          throw e.unwrapIOException();
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      private int length_ ;
      /**
       * <pre>
       * Length of the following cell block. Could calculate it but convenient having it too hand.
       * </pre>
       *
       * <code>optional uint32 length = 1;</code>
       */
      public boolean hasLength() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <pre>
       * Length of the following cell block. Could calculate it but convenient having it too hand.
       * </pre>
       *
       * <code>optional uint32 length = 1;</code>
       */
      public int getLength() {
        return length_;
      }
      /**
       * <pre>
       * Length of the following cell block. Could calculate it but convenient having it too hand.
       * </pre>
       *
       * <code>optional uint32 length = 1;</code>
       */
      public Builder setLength(int value) {
        bitField0_ |= 0x00000001;
        length_ = value;
        onChanged();
        return this;
      }
      /**
       * <pre>
       * Length of the following cell block. Could calculate it but convenient having it too hand.
       * </pre>
       *
       * <code>optional uint32 length = 1;</code>
       */
      public Builder clearLength() {
        bitField0_ = (bitField0_ & ~0x00000001);
        length_ = 0;
        onChanged();
        return this;
      }
      public final Builder setUnknownFields(
          final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      public final Builder mergeUnknownFields(
          final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hbase.pb.CellBlockMeta)
    }

    // @@protoc_insertion_point(class_scope:hbase.pb.CellBlockMeta)
    private static final org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta();
    }

    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    // PARSER is @Deprecated by the generator; use parser() instead.
    @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<CellBlockMeta>
        PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<CellBlockMeta>() {
      public CellBlockMeta parsePartialFrom(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
          org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
          return new CellBlockMeta(input, extensionRegistry);
      }
    };

    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<CellBlockMeta> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<CellBlockMeta> getParserForType() {
      return PARSER;
    }

    public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  // NOTE(review): generated read-only accessor interface for hbase.pb.ExceptionResponse.
  public interface ExceptionResponseOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hbase.pb.ExceptionResponse)
      org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {

    /**
     * <pre>
     * Class name of the exception thrown from the server
     * </pre>
     *
     * <code>optional string exception_class_name = 1;</code>
     */
    boolean hasExceptionClassName();
    /**
     * <pre>
     * Class name of the exception thrown from the server
     * </pre>
     *
     * <code>optional string exception_class_name = 1;</code>
     */
    java.lang.String getExceptionClassName();
    /**
     * <pre>
     * Class name of the exception thrown from the server
     * </pre>
     *
     * <code>optional string exception_class_name = 1;</code>
     */
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
        getExceptionClassNameBytes();

    /**
     * <pre>
     * Exception stack trace from the server side
     * </pre>
     *
     * <code>optional string stack_trace = 2;</code>
     */
    boolean hasStackTrace();
    /**
     * <pre>
     * Exception stack trace from the server side
     * </pre>
     *
     * <code>optional string stack_trace = 2;</code>
     */
    java.lang.String getStackTrace();
    /**
     * <pre>
     * Exception stack trace from the server side
     * </pre>
     *
     * <code>optional string stack_trace = 2;</code>
     */
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
        getStackTraceBytes();

    /**
     * <pre>
     * Optional hostname. Filled in for some exceptions such as region moved
     * where exception gives clue on where the region may have moved.
* </pre> * * <code>optional string hostname = 3;</code> */ boolean hasHostname(); /** * <pre> * Optional hostname. Filled in for some exceptions such as region moved * where exception gives clue on where the region may have moved. * </pre> * * <code>optional string hostname = 3;</code> */ java.lang.String getHostname(); /** * <pre> * Optional hostname. Filled in for some exceptions such as region moved * where exception gives clue on where the region may have moved. * </pre> * * <code>optional string hostname = 3;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getHostnameBytes(); /** * <code>optional int32 port = 4;</code> */ boolean hasPort(); /** * <code>optional int32 port = 4;</code> */ int getPort(); /** * <pre> * Set if we are NOT to retry on receipt of this exception * </pre> * * <code>optional bool do_not_retry = 5;</code> */ boolean hasDoNotRetry(); /** * <pre> * Set if we are NOT to retry on receipt of this exception * </pre> * * <code>optional bool do_not_retry = 5;</code> */ boolean getDoNotRetry(); } /** * <pre> * At the RPC layer, this message is used to carry * the server side exception to the RPC client. * </pre> * * Protobuf type {@code hbase.pb.ExceptionResponse} */ public static final class ExceptionResponse extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.ExceptionResponse) ExceptionResponseOrBuilder { // Use ExceptionResponse.newBuilder() to construct. 
// ---------------------------------------------------------------------------
// ExceptionResponse: construction, wire-format parsing, and field accessors.
// NOTE(review): this file is protoc-generated ("DO NOT EDIT"); comments below
// annotate the generated logic but no code tokens are changed.
// ---------------------------------------------------------------------------

// Invoked by the generated Builder (through GeneratedMessageV3) when build()
// copies the builder's state into an immutable message instance.
private ExceptionResponse(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// No-arg constructor: initializes every field to its proto2 default
// (empty string / 0 / false) so DEFAULT_INSTANCE is fully populated.
private ExceptionResponse() {
  exceptionClassName_ = "";
  stackTrace_ = "";
  hostname_ = "";
  port_ = 0;
  doNotRetry_ = false;
}

@java.lang.Override
public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
  return this.unknownFields;
}

// Wire-format parsing constructor: consumes tag/value pairs from the stream
// until end-of-message (tag 0). Tags this version does not recognize are
// preserved in unknownFields so they round-trip through older clients.
private ExceptionResponse(
    org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
  this();
  // Generated scratch variable; unused for this message (no repeated fields).
  int mutable_bitField0_ = 0;
  org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          // Tag 0 means the stream (or enclosing length-delimited region) ended.
          done = true;
          break;
        default: {
          // Unknown tag: stash it; a false return means end of group/input.
          if (!parseUnknownField(input, unknownFields,
                                 extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
        case 10: {
          // Field 1 (exception_class_name), wire type 2 (length-delimited).
          // Kept as raw ByteString; UTF-8 decode is deferred to the getter.
          org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes();
          bitField0_ |= 0x00000001;
          exceptionClassName_ = bs;
          break;
        }
        case 18: {
          // Field 2 (stack_trace), wire type 2.
          org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes();
          bitField0_ |= 0x00000002;
          stackTrace_ = bs;
          break;
        }
        case 26: {
          // Field 3 (hostname), wire type 2.
          org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes();
          bitField0_ |= 0x00000004;
          hostname_ = bs;
          break;
        }
        case 32: {
          // Field 4 (port), wire type 0 (varint).
          bitField0_ |= 0x00000008;
          port_ = input.readInt32();
          break;
        }
        case 40: {
          // Field 5 (do_not_retry), wire type 0 (varint-encoded bool).
          bitField0_ |= 0x00000010;
          doNotRetry_ = input.readBool();
          break;
        }
      }
    }
  } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
    // Attach the partially-parsed message so callers can inspect what was read.
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
        e).setUnfinishedMessage(this);
  } finally {
    // Always freeze whatever was accumulated, even on error paths.
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}

public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
  return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_ExceptionResponse_descriptor;
}

protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
  return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_ExceptionResponse_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse.Builder.class);
}

// Presence bitmap for proto2 optional fields: bit (N-1) set <=> field number N
// was explicitly present on the wire / set by the builder.
private int bitField0_;

public static final int EXCEPTION_CLASS_NAME_FIELD_NUMBER = 1;
// Holds either a java.lang.String (already decoded) or a ByteString (raw wire
// bytes); decoded lazily and cached by the getters below.
private volatile java.lang.Object exceptionClassName_;
/**
 * <pre>
 * Class name of the exception thrown from the server
 * </pre>
 *
 * <code>optional string exception_class_name = 1;</code>
 */
public boolean hasExceptionClassName() {
  return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <pre>
 * Class name of the exception thrown from the server
 * </pre>
 *
 * <code>optional string exception_class_name = 1;</code>
 */
public java.lang.String getExceptionClassName() {
  java.lang.Object ref = exceptionClassName_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
        (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    if (bs.isValidUtf8()) {
      // Cache the decoded String only when the bytes are valid UTF-8.
      exceptionClassName_ = s;
    }
    return s;
  }
}
/**
 * <pre>
 * Class name of the exception thrown from the server
 * </pre>
 *
 * <code>optional string exception_class_name = 1;</code>
 */
public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
getExceptionClassNameBytes() {
  java.lang.Object ref = exceptionClassName_;
  if (ref instanceof java.lang.String) {
    // Encode and cache the ByteString form so repeated calls are cheap.
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    exceptionClassName_ = b;
    return b;
  } else {
    return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
  }
}

public static final int STACK_TRACE_FIELD_NUMBER = 2;
// String-or-ByteString holder, same lazy-decode scheme as exceptionClassName_.
private volatile java.lang.Object stackTrace_;
/**
 * <pre>
 * Exception stack trace from the server side
 * </pre>
 *
 * <code>optional string stack_trace = 2;</code>
 */
public boolean hasStackTrace() {
  return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <pre>
 * Exception stack trace from the server side
 * </pre>
 *
 * <code>optional string stack_trace = 2;</code>
 */
public java.lang.String getStackTrace() {
  java.lang.Object ref = stackTrace_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
        (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    if (bs.isValidUtf8()) {
      stackTrace_ = s;
    }
    return s;
  }
}
/**
 * <pre>
 * Exception stack trace from the server side
 * </pre>
 *
 * <code>optional string stack_trace = 2;</code>
 */
public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
getStackTraceBytes() {
  java.lang.Object ref = stackTrace_;
  if (ref instanceof java.lang.String) {
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    stackTrace_ = b;
    return b;
  } else {
    return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
  }
}

public static final int HOSTNAME_FIELD_NUMBER = 3;
// String-or-ByteString holder, same lazy-decode scheme as exceptionClassName_.
private volatile java.lang.Object hostname_;
/**
 * <pre>
 * Optional hostname. Filled in for some exceptions such as region moved
 * where exception gives clue on where the region may have moved.
 * </pre>
 *
 * <code>optional string hostname = 3;</code>
 */
public boolean hasHostname() {
  return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <pre>
 * Optional hostname. Filled in for some exceptions such as region moved
 * where exception gives clue on where the region may have moved.
 * </pre>
 *
 * <code>optional string hostname = 3;</code>
 */
public java.lang.String getHostname() {
  java.lang.Object ref = hostname_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
        (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    if (bs.isValidUtf8()) {
      hostname_ = s;
    }
    return s;
  }
}
/**
 * <pre>
 * Optional hostname. Filled in for some exceptions such as region moved
 * where exception gives clue on where the region may have moved.
 * </pre>
 *
 * <code>optional string hostname = 3;</code>
 */
public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
getHostnameBytes() {
  java.lang.Object ref = hostname_;
  if (ref instanceof java.lang.String) {
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    hostname_ = b;
    return b;
  } else {
    return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
  }
}

public static final int PORT_FIELD_NUMBER = 4;
private int port_;
/**
 * <code>optional int32 port = 4;</code>
 */
public boolean hasPort() {
  return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
 * <code>optional int32 port = 4;</code>
 */
public int getPort() {
  return port_;
}

public static final int DO_NOT_RETRY_FIELD_NUMBER = 5;
private boolean doNotRetry_;
/**
 * <pre>
 * Set if we are NOT to retry on receipt of this exception
 * </pre>
 *
 * <code>optional bool do_not_retry = 5;</code>
 */
public boolean hasDoNotRetry() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <pre> * Set if we are NOT to retry on receipt of this exception * </pre> * * <code>optional bool do_not_retry = 5;</code> */ public boolean getDoNotRetry() { return doNotRetry_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, exceptionClassName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 2, stackTrace_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 3, hostname_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeInt32(4, port_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeBool(5, doNotRetry_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(1, exceptionClassName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(2, stackTrace_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(3, hostname_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += 
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeInt32Size(4, port_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(5, doNotRetry_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse) obj; boolean result = true; result = result && (hasExceptionClassName() == other.hasExceptionClassName()); if (hasExceptionClassName()) { result = result && getExceptionClassName() .equals(other.getExceptionClassName()); } result = result && (hasStackTrace() == other.hasStackTrace()); if (hasStackTrace()) { result = result && getStackTrace() .equals(other.getStackTrace()); } result = result && (hasHostname() == other.hasHostname()); if (hasHostname()) { result = result && getHostname() .equals(other.getHostname()); } result = result && (hasPort() == other.hasPort()); if (hasPort()) { result = result && (getPort() == other.getPort()); } result = result && (hasDoNotRetry() == other.hasDoNotRetry()); if (hasDoNotRetry()) { result = result && (getDoNotRetry() == other.getDoNotRetry()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasExceptionClassName()) { hash = (37 * hash) + EXCEPTION_CLASS_NAME_FIELD_NUMBER; hash = (53 * hash) + getExceptionClassName().hashCode(); } if (hasStackTrace()) { 
hash = (37 * hash) + STACK_TRACE_FIELD_NUMBER; hash = (53 * hash) + getStackTrace().hashCode(); } if (hasHostname()) { hash = (37 * hash) + HOSTNAME_FIELD_NUMBER; hash = (53 * hash) + getHostname().hashCode(); } if (hasPort()) { hash = (37 * hash) + PORT_FIELD_NUMBER; hash = (53 * hash) + getPort(); } if (hasDoNotRetry()) { hash = (37 * hash) + DO_NOT_RETRY_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getDoNotRetry()); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse parseFrom(java.io.InputStream input) 
throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public 
Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * At the RPC layer, this message is used to carry * the server side exception to the RPC client. * </pre> * * Protobuf type {@code hbase.pb.ExceptionResponse} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.ExceptionResponse) org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponseOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_ExceptionResponse_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_ExceptionResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private 
Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); exceptionClassName_ = ""; bitField0_ = (bitField0_ & ~0x00000001); stackTrace_ = ""; bitField0_ = (bitField0_ & ~0x00000002); hostname_ = ""; bitField0_ = (bitField0_ & ~0x00000004); port_ = 0; bitField0_ = (bitField0_ & ~0x00000008); doNotRetry_ = false; bitField0_ = (bitField0_ & ~0x00000010); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_ExceptionResponse_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse build() { org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse result = new org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.exceptionClassName_ = exceptionClassName_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.stackTrace_ = stackTrace_; 
if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.hostname_ = hostname_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; } result.port_ = port_; if (((from_bitField0_ & 0x00000010) == 0x00000010)) { to_bitField0_ |= 0x00000010; } result.doNotRetry_ = doNotRetry_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance()) return this; if (other.hasExceptionClassName()) { bitField0_ |= 
0x00000001; exceptionClassName_ = other.exceptionClassName_; onChanged(); } if (other.hasStackTrace()) { bitField0_ |= 0x00000002; stackTrace_ = other.stackTrace_; onChanged(); } if (other.hasHostname()) { bitField0_ |= 0x00000004; hostname_ = other.hostname_; onChanged(); } if (other.hasPort()) { setPort(other.getPort()); } if (other.hasDoNotRetry()) { setDoNotRetry(other.getDoNotRetry()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object exceptionClassName_ = ""; /** * <pre> * Class name of the exception thrown from the server * </pre> * * <code>optional string exception_class_name = 1;</code> */ public boolean hasExceptionClassName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * Class name of the exception thrown from the server * </pre> * * <code>optional string exception_class_name = 1;</code> */ public java.lang.String getExceptionClassName() { java.lang.Object ref = exceptionClassName_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = 
bs.toStringUtf8(); if (bs.isValidUtf8()) { exceptionClassName_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> * Class name of the exception thrown from the server * </pre> * * <code>optional string exception_class_name = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getExceptionClassNameBytes() { java.lang.Object ref = exceptionClassName_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); exceptionClassName_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <pre> * Class name of the exception thrown from the server * </pre> * * <code>optional string exception_class_name = 1;</code> */ public Builder setExceptionClassName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; exceptionClassName_ = value; onChanged(); return this; } /** * <pre> * Class name of the exception thrown from the server * </pre> * * <code>optional string exception_class_name = 1;</code> */ public Builder clearExceptionClassName() { bitField0_ = (bitField0_ & ~0x00000001); exceptionClassName_ = getDefaultInstance().getExceptionClassName(); onChanged(); return this; } /** * <pre> * Class name of the exception thrown from the server * </pre> * * <code>optional string exception_class_name = 1;</code> */ public Builder setExceptionClassNameBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; exceptionClassName_ = value; onChanged(); return this; } private java.lang.Object stackTrace_ = ""; /** * <pre> * Exception stack trace from the server side * </pre> * * <code>optional string stack_trace = 2;</code> */ public boolean hasStackTrace() { return ((bitField0_ & 0x00000002) 
== 0x00000002); } /** * <pre> * Exception stack trace from the server side * </pre> * * <code>optional string stack_trace = 2;</code> */ public java.lang.String getStackTrace() { java.lang.Object ref = stackTrace_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { stackTrace_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> * Exception stack trace from the server side * </pre> * * <code>optional string stack_trace = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getStackTraceBytes() { java.lang.Object ref = stackTrace_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); stackTrace_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <pre> * Exception stack trace from the server side * </pre> * * <code>optional string stack_trace = 2;</code> */ public Builder setStackTrace( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; stackTrace_ = value; onChanged(); return this; } /** * <pre> * Exception stack trace from the server side * </pre> * * <code>optional string stack_trace = 2;</code> */ public Builder clearStackTrace() { bitField0_ = (bitField0_ & ~0x00000002); stackTrace_ = getDefaultInstance().getStackTrace(); onChanged(); return this; } /** * <pre> * Exception stack trace from the server side * </pre> * * <code>optional string stack_trace = 2;</code> */ public Builder setStackTraceBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; stackTrace_ = value; 
onChanged(); return this; } private java.lang.Object hostname_ = ""; /** * <pre> * Optional hostname. Filled in for some exceptions such as region moved * where exception gives clue on where the region may have moved. * </pre> * * <code>optional string hostname = 3;</code> */ public boolean hasHostname() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <pre> * Optional hostname. Filled in for some exceptions such as region moved * where exception gives clue on where the region may have moved. * </pre> * * <code>optional string hostname = 3;</code> */ public java.lang.String getHostname() { java.lang.Object ref = hostname_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { hostname_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> * Optional hostname. Filled in for some exceptions such as region moved * where exception gives clue on where the region may have moved. * </pre> * * <code>optional string hostname = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getHostnameBytes() { java.lang.Object ref = hostname_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); hostname_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <pre> * Optional hostname. Filled in for some exceptions such as region moved * where exception gives clue on where the region may have moved. 
* </pre> * * <code>optional string hostname = 3;</code> */ public Builder setHostname( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; hostname_ = value; onChanged(); return this; } /** * <pre> * Optional hostname. Filled in for some exceptions such as region moved * where exception gives clue on where the region may have moved. * </pre> * * <code>optional string hostname = 3;</code> */ public Builder clearHostname() { bitField0_ = (bitField0_ & ~0x00000004); hostname_ = getDefaultInstance().getHostname(); onChanged(); return this; } /** * <pre> * Optional hostname. Filled in for some exceptions such as region moved * where exception gives clue on where the region may have moved. * </pre> * * <code>optional string hostname = 3;</code> */ public Builder setHostnameBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; hostname_ = value; onChanged(); return this; } private int port_ ; /** * <code>optional int32 port = 4;</code> */ public boolean hasPort() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional int32 port = 4;</code> */ public int getPort() { return port_; } /** * <code>optional int32 port = 4;</code> */ public Builder setPort(int value) { bitField0_ |= 0x00000008; port_ = value; onChanged(); return this; } /** * <code>optional int32 port = 4;</code> */ public Builder clearPort() { bitField0_ = (bitField0_ & ~0x00000008); port_ = 0; onChanged(); return this; } private boolean doNotRetry_ ; /** * <pre> * Set if we are NOT to retry on receipt of this exception * </pre> * * <code>optional bool do_not_retry = 5;</code> */ public boolean hasDoNotRetry() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <pre> * Set if we are NOT to retry on receipt of this exception * </pre> * * <code>optional bool do_not_retry = 5;</code> */ public boolean getDoNotRetry() { return 
doNotRetry_; } /** * <pre> * Set if we are NOT to retry on receipt of this exception * </pre> * * <code>optional bool do_not_retry = 5;</code> */ public Builder setDoNotRetry(boolean value) { bitField0_ |= 0x00000010; doNotRetry_ = value; onChanged(); return this; } /** * <pre> * Set if we are NOT to retry on receipt of this exception * </pre> * * <code>optional bool do_not_retry = 5;</code> */ public Builder clearDoNotRetry() { bitField0_ = (bitField0_ & ~0x00000010); doNotRetry_ = false; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.ExceptionResponse) } // @@protoc_insertion_point(class_scope:hbase.pb.ExceptionResponse) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ExceptionResponse> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<ExceptionResponse>() { public ExceptionResponse parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new ExceptionResponse(input, extensionRegistry); } }; public static 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ExceptionResponse>
    parser() {
  return PARSER;
}
@java.lang.Override
public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ExceptionResponse> getParserForType() {
  return PARSER;
}
public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}

}

/**
 * Accessor contract for {@code hbase.pb.CryptoCipherMeta}, implemented by both the
 * immutable message and its Builder. Generated by protoc from RPC.proto.
 */
public interface CryptoCipherMetaOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hbase.pb.CryptoCipherMeta)
    org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {

  /** <code>required string transformation = 1;</code> */
  boolean hasTransformation();
  /** <code>required string transformation = 1;</code> */
  java.lang.String getTransformation();
  /** <code>required string transformation = 1;</code> */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
      getTransformationBytes();

  /** <code>optional bytes inKey = 2;</code> */
  boolean hasInKey();
  /** <code>optional bytes inKey = 2;</code> */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getInKey();

  /** <code>optional bytes inIv = 3;</code> */
  boolean hasInIv();
  /** <code>optional bytes inIv = 3;</code> */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getInIv();

  /** <code>optional bytes outKey = 4;</code> */
  boolean hasOutKey();
  /** <code>optional bytes outKey = 4;</code> */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getOutKey();

  /** <code>optional bytes outIv = 5;</code> */
  boolean hasOutIv();
  /** <code>optional bytes outIv = 5;</code> */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getOutIv();
}

/**
 * <pre>
 **
 * Cipher meta for Crypto
 * </pre>
 *
 * Protobuf type {@code hbase.pb.CryptoCipherMeta}
 *
 * Immutable message with one required string field (transformation, #1) and four
 * optional bytes fields (inKey #2, inIv #3, outKey #4, outIv #5). proto2 field
 * presence is tracked via the bit masks in {@code bitField0_}.
 */
public static final class CryptoCipherMeta extends
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:hbase.pb.CryptoCipherMeta)
    CryptoCipherMetaOrBuilder {
  // Use CryptoCipherMeta.newBuilder() to construct.
  private CryptoCipherMeta(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor: initializes every field to its proto default
  // (empty string / empty ByteString).
  private CryptoCipherMeta() {
    transformation_ = "";
    inKey_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
    inIv_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
    outKey_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
    outIv_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
  }
  @java.lang.Override
  public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    return this.unknownFields;
  }
  // Wire-format parsing constructor: reads tags until end of stream (tag 0).
  // Fields with unrecognized tags are preserved in unknownFields rather than
  // dropped; malformed input surfaces as InvalidProtocolBufferException with the
  // partially-parsed message attached.
  private CryptoCipherMeta(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    this();
    int mutable_bitField0_ = 0;
    org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            // End of input.
            done = true;
            break;
          default: {
            if (!parseUnknownField(input, unknownFields,
                                   extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
          case 10: {
            // Field 1, wire type 2: transformation (stored as ByteString,
            // decoded to String lazily by getTransformation()).
            org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes();
            bitField0_ |= 0x00000001;
            transformation_ = bs;
            break;
          }
          case 18: {
            // Field 2: inKey.
            bitField0_ |= 0x00000002;
            inKey_ = input.readBytes();
            break;
          }
          case 26: {
            // Field 3: inIv.
            bitField0_ |= 0x00000004;
            inIv_ = input.readBytes();
            break;
          }
          case 34: {
            // Field 4: outKey.
            bitField0_ |= 0x00000008;
            outKey_ = input.readBytes();
            break;
          }
          case 42: {
            // Field 5: outIv.
            bitField0_ |= 0x00000010;
            outIv_ = input.readBytes();
            break;
          }
        }
      }
    } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_CryptoCipherMeta_descriptor;
  }
  protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_CryptoCipherMeta_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta.Builder.class);
  }
  // Presence bits for fields 1..5 (masks 0x01..0x10).
  private int bitField0_;
  public static final int TRANSFORMATION_FIELD_NUMBER = 1;
  // Holds either a java.lang.String or a ByteString; decoded lazily and the
  // decoded String is cached back, hence volatile for safe publication.
  private volatile java.lang.Object transformation_;
  /** <code>required string transformation = 1;</code> */
  public boolean hasTransformation() {
    return ((bitField0_ & 0x00000001) == 0x00000001);
  }
  /** <code>required string transformation = 1;</code> */
  public java.lang.String getTransformation() {
    java.lang.Object ref = transformation_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
          (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      if (bs.isValidUtf8()) {
        // Cache the decoded form only when the bytes are valid UTF-8.
        transformation_ = s;
      }
      return s;
    }
  }
  /** <code>required string transformation = 1;</code> */
  public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
      getTransformationBytes() {
    java.lang.Object ref = transformation_;
    if (ref instanceof java.lang.String) {
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
          org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      transformation_ = b;
      return b;
    } else {
      return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
    }
  }
  public static final int INKEY_FIELD_NUMBER = 2;
  private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString inKey_;
  /** <code>optional bytes inKey = 2;</code> */
  public boolean hasInKey() {
    return ((bitField0_ & 0x00000002) == 0x00000002);
  }
  /** <code>optional bytes inKey = 2;</code> */
  public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getInKey() {
    return inKey_;
  }
  public static final int INIV_FIELD_NUMBER = 3;
  private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString inIv_;
  /** <code>optional bytes inIv = 3;</code> */
  public boolean hasInIv() {
    return ((bitField0_ & 0x00000004) == 0x00000004);
  }
  /** <code>optional bytes inIv = 3;</code> */
  public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getInIv() {
    return inIv_;
  }
  public static final int OUTKEY_FIELD_NUMBER = 4;
  private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString outKey_;
  /** <code>optional bytes outKey = 4;</code> */
  public boolean hasOutKey() {
    return ((bitField0_ & 0x00000008) == 0x00000008);
  }
  /** <code>optional bytes outKey = 4;</code> */
  public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getOutKey() {
    return outKey_;
  }
  public static final int OUTIV_FIELD_NUMBER = 5;
  private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString outIv_;
  /** <code>optional bytes outIv = 5;</code> */
  public boolean hasOutIv() {
    return ((bitField0_ & 0x00000010) == 0x00000010);
  }
  /** <code>optional bytes outIv = 5;</code> */
  public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getOutIv() {
    return outIv_;
  }
  // Memoized result of isInitialized(): -1 = unknown, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // transformation is the only required field.
    if (!hasTransformation()) {
      memoizedIsInitialized = 0;
      return false;
    }
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes set fields in field-number order, then any unknown fields.
  public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
      throws java.io.IOException {
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, transformation_);
    }
    if (((bitField0_ & 0x00000002) == 0x00000002)) {
      output.writeBytes(2, inKey_);
    }
    if (((bitField0_ & 0x00000004) == 0x00000004)) {
      output.writeBytes(3, inIv_);
    }
    if (((bitField0_ & 0x00000008) == 0x00000008)) {
      output.writeBytes(4, outKey_);
    }
    if (((bitField0_ & 0x00000010) == 0x00000010)) {
      output.writeBytes(5, outIv_);
    }
    unknownFields.writeTo(output);
  }
  // Size of the serialized form; memoized in memoizedSize (-1 = not computed).
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(1, transformation_);
    }
    if (((bitField0_ & 0x00000002) == 0x00000002)) {
      size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
        .computeBytesSize(2, inKey_);
    }
    if (((bitField0_ & 0x00000004) == 0x00000004)) {
      size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
        .computeBytesSize(3, inIv_);
    }
    if (((bitField0_ & 0x00000008) == 0x00000008)) {
      size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
        .computeBytesSize(4, outKey_);
    }
    if (((bitField0_ & 0x00000010) == 0x00000010)) {
      size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
        .computeBytesSize(5, outIv_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }
  private static final long serialVersionUID = 0L;
  // Field-by-field equality: presence must match, and set fields must compare equal.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta)) {
      return super.equals(obj);
    }
    org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta other = (org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta) obj;
    boolean result = true;
    result = result && (hasTransformation() == other.hasTransformation());
    if (hasTransformation()) {
      result = result && getTransformation()
          .equals(other.getTransformation());
    }
    result = result && (hasInKey() == other.hasInKey());
    if (hasInKey()) {
      result = result && getInKey()
          .equals(other.getInKey());
    }
    result = result && (hasInIv() == other.hasInIv());
    if (hasInIv()) {
      result = result && getInIv()
          .equals(other.getInIv());
    }
    result = result && (hasOutKey() == other.hasOutKey());
    if (hasOutKey()) {
      result = result && getOutKey()
          .equals(other.getOutKey());
    }
    result = result && (hasOutIv() == other.hasOutIv());
    if (hasOutIv()) {
      result = result && getOutIv()
          .equals(other.getOutIv());
    }
    result = result && unknownFields.equals(other.unknownFields);
    return result;
  }
  // Hash over descriptor plus each set field (tagged with its field number);
  // memoized in memoizedHashCode. Consistent with equals() above.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasTransformation()) {
      hash = (37 * hash) + TRANSFORMATION_FIELD_NUMBER;
      hash = (53 * hash) + getTransformation().hashCode();
    }
    if (hasInKey()) {
      hash = (37 * hash) + INKEY_FIELD_NUMBER;
      hash = (53 * hash) + getInKey().hashCode();
    }
    if (hasInIv()) {
      hash = (37 * hash) + INIV_FIELD_NUMBER;
      hash = (53 * hash) + getInIv().hashCode();
    }
    if (hasOutKey()) {
      hash = (37 * hash) + OUTKEY_FIELD_NUMBER;
      hash = (53 * hash) + getOutKey().hashCode();
    }
    if (hasOutIv()) {
      hash = (37 * hash) + OUTIV_FIELD_NUMBER;
      hash = (53 * hash) + getOutIv().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Static parse helpers: thin wrappers delegating to PARSER /
  // GeneratedMessageV3 IO helpers.
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta parseFrom(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta parseFrom(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta parseFrom(byte[] data)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta parseFrom(
      byte[] data,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta parseFrom(
      java.io.InputStream input,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta parseDelimitedFrom(
      java.io.InputStream input,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta parseFrom(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta parseFrom(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() {
    // The default instance yields a fresh builder; anything else is copied in.
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   **
   * Cipher meta for Crypto
   * </pre>
   *
   * Protobuf type {@code hbase.pb.CryptoCipherMeta}
   *
   * Mutable builder for CryptoCipherMeta; mirrors the message's fields and
   * presence bits.
   */
  public static final class Builder extends
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:hbase.pb.CryptoCipherMeta)
      org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMetaOrBuilder {
    public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_CryptoCipherMeta_descriptor;
    }
    protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_CryptoCipherMeta_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta.Builder.class);
    }
    // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      // No sub-message field builders to force-initialize for this message.
      if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    // Resets every field to its default and clears all presence bits.
    public Builder clear() {
      super.clear();
      transformation_ = "";
      bitField0_ = (bitField0_ & ~0x00000001);
      inKey_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
      bitField0_ = (bitField0_ & ~0x00000002);
      inIv_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
      bitField0_ = (bitField0_ & ~0x00000004);
      outKey_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
      bitField0_ = (bitField0_ & ~0x00000008);
      outIv_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
      bitField0_ = (bitField0_ & ~0x00000010);
      return this;
    }
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_CryptoCipherMeta_descriptor;
    }
    public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta getDefaultInstanceForType() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta.getDefaultInstance();
    }
    // build() rejects a message missing its required field; buildPartial() does not.
    public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta build() {
      org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    // Copies builder state into a new message; field values are copied
    // unconditionally, presence bits only when set.
    public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta buildPartial() {
      org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta result = new org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta(this);
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
        to_bitField0_ |= 0x00000001;
      }
      result.transformation_ = transformation_;
      if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
        to_bitField0_ |= 0x00000002;
      }
      result.inKey_ = inKey_;
      if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
        to_bitField0_ |= 0x00000004;
      }
      result.inIv_ = inIv_;
      if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
        to_bitField0_ |= 0x00000008;
      }
      result.outKey_ = outKey_;
      if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
        to_bitField0_ |= 0x00000010;
      }
      result.outIv_ = outIv_;
      result.bitField0_ = to_bitField0_;
      onBuilt();
      return result;
    }
    public Builder clone() {
      return (Builder) super.clone();
    }
    // Reflective field mutators: delegate to the superclass, narrowing the
    // return type to this Builder.
    public Builder setField(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
        Object value) {
      return (Builder) super.setField(field, value);
    }
    public Builder clearField(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
      return (Builder) super.clearField(field);
    }
    public Builder clearOneof(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return (Builder) super.clearOneof(oneof);
    }
    public Builder setRepeatedField(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, Object value) {
      return (Builder) super.setRepeatedField(field, index, value);
    }
    public Builder addRepeatedField(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
        Object value) {
      return (Builder) super.addRepeatedField(field, value);
    }
    public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
      if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta) {
        return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Merges set fields from other into this builder; unset fields in other
    // leave this builder unchanged.
    public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta other) {
      if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta.getDefaultInstance()) return this;
      if (other.hasTransformation()) {
        bitField0_ |= 0x00000001;
        transformation_ = other.transformation_;
        onChanged();
      }
      if (other.hasInKey()) {
        setInKey(other.getInKey());
      }
      if (other.hasInIv()) {
        setInIv(other.getInIv());
      }
      if (other.hasOutKey()) {
        setOutKey(other.getOutKey());
      }
      if (other.hasOutIv()) {
        setOutIv(other.getOutIv());
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }
    public final boolean isInitialized() {
      // Only transformation (field 1) is required.
      if (!hasTransformation()) {
        return false;
      }
      return true;
    }
    // Parses from a stream and merges; on parse failure, merges whatever was
    // read before rethrowing.
    public Builder mergeFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    private int bitField0_;
    // String-or-ByteString holder, same lazy-decode scheme as the message.
    private java.lang.Object transformation_ = "";
    /** <code>required string transformation = 1;</code> */
    public boolean hasTransformation() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /** <code>required string transformation = 1;</code> */
    public java.lang.String getTransformation() {
      java.lang.Object ref = transformation_;
      if (!(ref instanceof java.lang.String)) {
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
            (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          transformation_ = s;
        }
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /** <code>required string transformation = 1;</code> */
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
        getTransformationBytes() {
      java.lang.Object ref = transformation_;
      if (ref instanceof String) {
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
            org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        transformation_ = b;
        return b;
      } else {
        return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
      }
    }
    /** <code>required string transformation = 1;</code> */
    public Builder setTransformation(
        java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000001;
      transformation_ = value;
      onChanged();
      return this;
    }
    /** <code>required string transformation = 1;</code> */
    public Builder clearTransformation() {
      bitField0_ = (bitField0_ & ~0x00000001);
      transformation_ = getDefaultInstance().getTransformation();
      onChanged();
      return this;
    }
    /** <code>required string transformation = 1;</code> */
    public Builder setTransformationBytes(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000001;
      transformation_ = value;
      onChanged();
      return this;
    }
    private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString inKey_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
    /** <code>optional bytes inKey = 2;</code> */
    public boolean hasInKey() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /** <code>optional bytes inKey = 2;</code> */
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getInKey() {
      return inKey_;
    }
    /** <code>optional bytes inKey = 2;</code> */
    public Builder setInKey(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000002;
      inKey_ = value;
      onChanged();
      return this;
    }
    /** <code>optional bytes inKey = 2;</code> */
    public Builder clearInKey() {
      bitField0_ = (bitField0_ & ~0x00000002);
      inKey_ = getDefaultInstance().getInKey();
      onChanged();
      return this;
    }
    private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString inIv_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
    /** <code>optional bytes inIv = 3;</code> */
    public boolean hasInIv() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /** <code>optional bytes inIv = 3;</code> */
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getInIv() {
      return inIv_;
    }
    /** <code>optional bytes inIv = 3;</code> */
    public Builder setInIv(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000004;
      inIv_ = value;
      onChanged();
      return this;
    }
    /** <code>optional bytes inIv = 3;</code> */
    public Builder clearInIv() {
      bitField0_ = (bitField0_ & ~0x00000004);
      inIv_ = getDefaultInstance().getInIv();
      onChanged();
      return this;
    }
    private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString outKey_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
    /** <code>optional bytes outKey = 4;</code> */
    public boolean hasOutKey() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /** <code>optional bytes outKey = 4;</code> */
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getOutKey() {
      return outKey_;
    }
    /** <code>optional bytes outKey = 4;</code> */
    public Builder setOutKey(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000008;
      outKey_ = value;
      onChanged();
      return this;
    }
    /** <code>optional bytes outKey = 4;</code> */
    public Builder clearOutKey() {
      bitField0_ = (bitField0_ & ~0x00000008);
      outKey_ = getDefaultInstance().getOutKey();
      onChanged();
      return this;
    }
    private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString outIv_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
    /** <code>optional bytes outIv = 5;</code> */
    public boolean hasOutIv() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    /** <code>optional bytes outIv = 5;</code> */
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getOutIv() {
      return outIv_;
    }
    /** <code>optional bytes outIv = 5;</code> */
    public Builder setOutIv(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000010;
      outIv_ = value;
      onChanged();
      return this;
    }
    /** <code>optional bytes outIv = 5;</code> */
    public Builder clearOutIv() {
      bitField0_ = (bitField0_ & ~0x00000010);
      outIv_ = getDefaultInstance().getOutIv();
      onChanged();
      return this;
    }
    public final Builder setUnknownFields(
        final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    public final Builder mergeUnknownFields(
        final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:hbase.pb.CryptoCipherMeta)
  }

  // @@protoc_insertion_point(class_scope:hbase.pb.CryptoCipherMeta)
  // Singleton default instance; also the source of fresh builders.
  private static final org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta();
  }
  public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Deprecated in favor of the parser() accessor below.
  @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<CryptoCipherMeta>
      PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<CryptoCipherMeta>() {
    public CryptoCipherMeta parsePartialFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
        return new CryptoCipherMeta(input, extensionRegistry);
    }
  };
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<CryptoCipherMeta> parser() {
    return PARSER;
  }
  @java.lang.Override
  public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<CryptoCipherMeta> getParserForType() {
    return PARSER;
  }
  public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}

/**
 * Accessor contract for {@code hbase.pb.RequestHeader}, implemented by both the
 * message and its Builder.
 */
public interface RequestHeaderOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hbase.pb.RequestHeader)
    org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {

  /**
   * <pre>
   * Monotonically increasing call_id to keep track of RPC requests and their response
   * </pre>
   *
   * <code>optional uint32 call_id = 1;</code>
   */
  boolean hasCallId();
  /**
   * <pre>
   * Monotonically increasing call_id to keep track of RPC requests and their response
   * </pre>
   *
   * <code>optional uint32 call_id = 1;</code>
   */
  int getCallId();

  /** <code>optional .hbase.pb.RPCTInfo trace_info = 2;</code> */
  boolean hasTraceInfo();
  /** <code>optional .hbase.pb.RPCTInfo trace_info = 2;</code> */
  org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo getTraceInfo();
  /** <code>optional .hbase.pb.RPCTInfo trace_info = 2;</code> */
  org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfoOrBuilder getTraceInfoOrBuilder();

  /** <code>optional string method_name = 3;</code> */
  boolean hasMethodName();
  /** <code>optional string method_name = 3;</code> */
  java.lang.String getMethodName();
  /** <code>optional string method_name = 3;</code> */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
      getMethodNameBytes();

  /**
   * <pre>
   * If true, then a pb Message param follows.
   * </pre>
   *
   * <code>optional bool request_param = 4;</code>
   */
  boolean hasRequestParam();
  /**
   * <pre>
   * If true, then a pb Message param follows.
* </pre> * * <code>optional bool request_param = 4;</code> */ boolean getRequestParam(); /** * <pre> * If present, then an encoded data block follows. * </pre> * * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 5;</code> */ boolean hasCellBlockMeta(); /** * <pre> * If present, then an encoded data block follows. * </pre> * * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 5;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta getCellBlockMeta(); /** * <pre> * If present, then an encoded data block follows. * </pre> * * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 5;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder getCellBlockMetaOrBuilder(); /** * <pre> * 0 is NORMAL priority. 200 is HIGH. If no priority, treat it as NORMAL. * See HConstants. * </pre> * * <code>optional uint32 priority = 6;</code> */ boolean hasPriority(); /** * <pre> * 0 is NORMAL priority. 200 is HIGH. If no priority, treat it as NORMAL. * See HConstants. * </pre> * * <code>optional uint32 priority = 6;</code> */ int getPriority(); /** * <code>optional uint32 timeout = 7;</code> */ boolean hasTimeout(); /** * <code>optional uint32 timeout = 7;</code> */ int getTimeout(); } /** * <pre> * Header sent making a request. * </pre> * * Protobuf type {@code hbase.pb.RequestHeader} */ public static final class RequestHeader extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.RequestHeader) RequestHeaderOrBuilder { // Use RequestHeader.newBuilder() to construct. 
/* NOTE(review): generated message internals. The stream ctor below reads tag-delimited fields in a loop,
   sets the matching presence bit in bitField0_ for each field it sees, and banks unrecognized tags into
   unknownFields (built in the finally block even on parse failure). Do not hand-modify; regenerate from RPC.proto. */
private RequestHeader(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private RequestHeader() { callId_ = 0; methodName_ = ""; requestParam_ = false; priority_ = 0; timeout_ = 0; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private RequestHeader( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { bitField0_ |= 0x00000001; callId_ = input.readUInt32(); break; } case 18: { org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo.Builder subBuilder = null; if (((bitField0_ & 0x00000002) == 0x00000002)) { subBuilder = traceInfo_.toBuilder(); } traceInfo_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(traceInfo_); traceInfo_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } case 26: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000004; methodName_ = bs; break; } case 32: { bitField0_ |= 0x00000008; requestParam_ = input.readBool(); break; } case 42: { org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.Builder subBuilder = null; if 
(((bitField0_ & 0x00000010) == 0x00000010)) { subBuilder = cellBlockMeta_.toBuilder(); } cellBlockMeta_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(cellBlockMeta_); cellBlockMeta_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000010; break; } case 48: { bitField0_ |= 0x00000020; priority_ = input.readUInt32(); break; } case 56: { bitField0_ |= 0x00000040; timeout_ = input.readUInt32(); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_RequestHeader_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_RequestHeader_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader.Builder.class); } private int bitField0_; /* Field accessors: presence tracked in bitField0_ (0x1 call_id, 0x2 trace_info, 0x4 method_name,
   0x8 request_param, 0x10 cell_block_meta, 0x20 priority, 0x40 timeout). */ public static final int CALL_ID_FIELD_NUMBER = 1; private int callId_; /** * <pre> * Monotonically increasing call_id to keep track of RPC requests and their response * </pre> * * <code>optional uint32 call_id = 1;</code> */ public boolean hasCallId() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * Monotonically increasing call_id to keep track of 
RPC requests and their response * </pre> * * <code>optional uint32 call_id = 1;</code> */ public int getCallId() { return callId_; } public static final int TRACE_INFO_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo traceInfo_; /** * <code>optional .hbase.pb.RPCTInfo trace_info = 2;</code> */ public boolean hasTraceInfo() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional .hbase.pb.RPCTInfo trace_info = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo getTraceInfo() { return traceInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo.getDefaultInstance() : traceInfo_; } /** * <code>optional .hbase.pb.RPCTInfo trace_info = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfoOrBuilder getTraceInfoOrBuilder() { return traceInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo.getDefaultInstance() : traceInfo_; } public static final int METHOD_NAME_FIELD_NUMBER = 3; private volatile java.lang.Object methodName_; /** * <code>optional string method_name = 3;</code> */ public boolean hasMethodName() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional string method_name = 3;</code> */ public java.lang.String getMethodName() { java.lang.Object ref = methodName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { /* Lazily decode bytes to String; cache the String back only when the bytes are valid UTF-8. */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { methodName_ = s; } return s; } } /** * <code>optional string method_name = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getMethodNameBytes() { java.lang.Object ref = methodName_; if (ref instanceof java.lang.String) { 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); methodName_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int REQUEST_PARAM_FIELD_NUMBER = 4; private boolean requestParam_; /** * <pre> * If true, then a pb Message param follows. * </pre> * * <code>optional bool request_param = 4;</code> */ public boolean hasRequestParam() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <pre> * If true, then a pb Message param follows. * </pre> * * <code>optional bool request_param = 4;</code> */ public boolean getRequestParam() { return requestParam_; } public static final int CELL_BLOCK_META_FIELD_NUMBER = 5; private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta cellBlockMeta_; /** * <pre> * If present, then an encoded data block follows. * </pre> * * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 5;</code> */ public boolean hasCellBlockMeta() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <pre> * If present, then an encoded data block follows. * </pre> * * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta getCellBlockMeta() { return cellBlockMeta_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance() : cellBlockMeta_; } /** * <pre> * If present, then an encoded data block follows. * </pre> * * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder getCellBlockMetaOrBuilder() { return cellBlockMeta_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance() : cellBlockMeta_; } public static final int PRIORITY_FIELD_NUMBER = 6; private int priority_; /** * <pre> * 0 is NORMAL priority. 200 is HIGH. If no priority, treat it as NORMAL. * See HConstants. * </pre> * * <code>optional uint32 priority = 6;</code> */ public boolean hasPriority() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <pre> * 0 is NORMAL priority. 200 is HIGH. If no priority, treat it as NORMAL. * See HConstants. * </pre> * * <code>optional uint32 priority = 6;</code> */ public int getPriority() { return priority_; } public static final int TIMEOUT_FIELD_NUMBER = 7; private int timeout_; /** * <code>optional uint32 timeout = 7;</code> */ public boolean hasTimeout() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** * <code>optional uint32 timeout = 7;</code> */ public int getTimeout() { return timeout_; } private byte memoizedIsInitialized = -1; /* Every field of RequestHeader is optional, so any instance is initialized; result is memoized. */ public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } /* Serialization: each field is emitted only when its presence bit is set; field numbers mirror RPC.proto. */ public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt32(1, callId_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeMessage(2, getTraceInfo()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 3, methodName_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeBool(4, requestParam_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeMessage(5, getCellBlockMeta()); } if (((bitField0_ & 0x00000020) == 0x00000020)) { output.writeUInt32(6, priority_); } if (((bitField0_ & 0x00000040) == 0x00000040)) { output.writeUInt32(7, timeout_); } 
unknownFields.writeTo(output); } /* Size computation mirrors writeTo field-for-field; result cached in memoizedSize (-1 = not computed). */ public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt32Size(1, callId_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(2, getTraceInfo()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(3, methodName_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(4, requestParam_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(5, getCellBlockMeta()); } if (((bitField0_ & 0x00000020) == 0x00000020)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt32Size(6, priority_); } if (((bitField0_ & 0x00000040) == 0x00000040)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt32Size(7, timeout_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; /* equals(): compares presence flag plus value for each of the seven fields, then the unknown-field set. */ @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader other = (org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader) obj; boolean result = true; result = result && (hasCallId() == other.hasCallId()); if (hasCallId()) { result = result && (getCallId() == other.getCallId()); } result = result && (hasTraceInfo() == 
other.hasTraceInfo()); if (hasTraceInfo()) { result = result && getTraceInfo() .equals(other.getTraceInfo()); } result = result && (hasMethodName() == other.hasMethodName()); if (hasMethodName()) { result = result && getMethodName() .equals(other.getMethodName()); } result = result && (hasRequestParam() == other.hasRequestParam()); if (hasRequestParam()) { result = result && (getRequestParam() == other.getRequestParam()); } result = result && (hasCellBlockMeta() == other.hasCellBlockMeta()); if (hasCellBlockMeta()) { result = result && getCellBlockMeta() .equals(other.getCellBlockMeta()); } result = result && (hasPriority() == other.hasPriority()); if (hasPriority()) { result = result && (getPriority() == other.getPriority()); } result = result && (hasTimeout() == other.hasTimeout()); if (hasTimeout()) { result = result && (getTimeout() == other.getTimeout()); } result = result && unknownFields.equals(other.unknownFields); return result; } /* hashCode(): folds field number + value for each present field; memoized (0 = not computed). */ @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasCallId()) { hash = (37 * hash) + CALL_ID_FIELD_NUMBER; hash = (53 * hash) + getCallId(); } if (hasTraceInfo()) { hash = (37 * hash) + TRACE_INFO_FIELD_NUMBER; hash = (53 * hash) + getTraceInfo().hashCode(); } if (hasMethodName()) { hash = (37 * hash) + METHOD_NAME_FIELD_NUMBER; hash = (53 * hash) + getMethodName().hashCode(); } if (hasRequestParam()) { hash = (37 * hash) + REQUEST_PARAM_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getRequestParam()); } if (hasCellBlockMeta()) { hash = (37 * hash) + CELL_BLOCK_META_FIELD_NUMBER; hash = (53 * hash) + getCellBlockMeta().hashCode(); } if (hasPriority()) { hash = (37 * hash) + PRIORITY_FIELD_NUMBER; hash = (53 * hash) + getPriority(); } if (hasTimeout()) { hash = (37 * hash) + TIMEOUT_FIELD_NUMBER; hash = (53 * hash) + getTimeout(); } hash = (29 * hash) + 
unknownFields.hashCode(); memoizedHashCode = hash; return hash; } /* parseFrom overloads: thin wrappers over PARSER / GeneratedMessageV3 helpers for each input type. */ public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } 
/* Delimited-stream parse helpers and builder factories; all RequestHeader instances funnel through Builder. */
public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Header sent making a request. * </pre> * * Protobuf type {@code hbase.pb.RequestHeader} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.RequestHeader)
 org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeaderOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_RequestHeader_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_RequestHeader_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader.newBuilder()
 private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getTraceInfoFieldBuilder(); getCellBlockMetaFieldBuilder(); } } /* clear(): resets every field to its default value and drops all presence bits. */ public Builder clear() { super.clear(); callId_ = 0; bitField0_ = (bitField0_ & ~0x00000001); if 
(traceInfoBuilder_ == null) { traceInfo_ = null; } else { traceInfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); methodName_ = ""; bitField0_ = (bitField0_ & ~0x00000004); requestParam_ = false; bitField0_ = (bitField0_ & ~0x00000008); if (cellBlockMetaBuilder_ == null) { cellBlockMeta_ = null; } else { cellBlockMetaBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000010); priority_ = 0; bitField0_ = (bitField0_ & ~0x00000020); timeout_ = 0; bitField0_ = (bitField0_ & ~0x00000040); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_RequestHeader_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader build() { org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } /* buildPartial(): copies builder state into a new message, translating builder presence bits into the message's bitField0_. */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader result = new org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.callId_ = callId_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } if (traceInfoBuilder_ == null) { result.traceInfo_ = traceInfo_; } else { result.traceInfo_ = traceInfoBuilder_.build(); } if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.methodName_ = 
methodName_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; } result.requestParam_ = requestParam_; if (((from_bitField0_ & 0x00000010) == 0x00000010)) { to_bitField0_ |= 0x00000010; } if (cellBlockMetaBuilder_ == null) { result.cellBlockMeta_ = cellBlockMeta_; } else { result.cellBlockMeta_ = cellBlockMetaBuilder_.build(); } if (((from_bitField0_ & 0x00000020) == 0x00000020)) { to_bitField0_ |= 0x00000020; } result.priority_ = priority_; if (((from_bitField0_ & 0x00000040) == 0x00000040)) { to_bitField0_ |= 0x00000040; } result.timeout_ = timeout_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } /* The overrides below only narrow the return type of the GeneratedMessageV3.Builder methods to Builder. */ public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader)other); } else { super.mergeFrom(other); return this; } } public Builder 
mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader other) { /* Copies each present field from 'other' into this builder; message fields go through mergeX helpers. */ if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader.getDefaultInstance()) return this; if (other.hasCallId()) { setCallId(other.getCallId()); } if (other.hasTraceInfo()) { mergeTraceInfo(other.getTraceInfo()); } if (other.hasMethodName()) { bitField0_ |= 0x00000004; methodName_ = other.methodName_; onChanged(); } if (other.hasRequestParam()) { setRequestParam(other.getRequestParam()); } if (other.hasCellBlockMeta()) { mergeCellBlockMeta(other.getCellBlockMeta()); } if (other.hasPriority()) { setPriority(other.getPriority()); } if (other.hasTimeout()) { setTimeout(other.getTimeout()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } /* All fields optional, so a builder is always initialized. */ public final boolean isInitialized() { return true; } /* Stream merge: partially-parsed data is still merged in via the finally block before the error propagates. */ public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private int callId_ ; /** * <pre> * Monotonically increasing call_id to keep track of RPC requests and their response * </pre> * * <code>optional uint32 call_id = 1;</code> */ public boolean hasCallId() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * Monotonically increasing call_id to keep track of RPC requests and their response * </pre> * * <code>optional uint32 call_id = 
1;</code> */ public int getCallId() { return callId_; } /** * <pre> * Monotonically increasing call_id to keep track of RPC requests and their response * </pre> * * <code>optional uint32 call_id = 1;</code> */ public Builder setCallId(int value) { bitField0_ |= 0x00000001; callId_ = value; onChanged(); return this; } /** * <pre> * Monotonically increasing call_id to keep track of RPC requests and their response * </pre> * * <code>optional uint32 call_id = 1;</code> */ public Builder clearCallId() { bitField0_ = (bitField0_ & ~0x00000001); callId_ = 0; onChanged(); return this; } /* Field 2 (trace_info): held as a plain field until getTraceInfoFieldBuilder() is first called, after which the SingleFieldBuilderV3 owns the value. */ private org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo traceInfo_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfoOrBuilder> traceInfoBuilder_; /** * <code>optional .hbase.pb.RPCTInfo trace_info = 2;</code> */ public boolean hasTraceInfo() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional .hbase.pb.RPCTInfo trace_info = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo getTraceInfo() { if (traceInfoBuilder_ == null) { return traceInfo_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo.getDefaultInstance() : traceInfo_; } else { return traceInfoBuilder_.getMessage(); } } /** * <code>optional .hbase.pb.RPCTInfo trace_info = 2;</code> */ public Builder setTraceInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo value) { if (traceInfoBuilder_ == null) { if (value == null) { throw new NullPointerException(); } traceInfo_ = value; onChanged(); } else { traceInfoBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .hbase.pb.RPCTInfo trace_info = 2;</code> */ public Builder setTraceInfo( org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo.Builder builderForValue) { if (traceInfoBuilder_ == null) { traceInfo_ = builderForValue.build(); onChanged(); } else { traceInfoBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .hbase.pb.RPCTInfo trace_info = 2;</code> */ /* merge: combines with an existing non-default value via newBuilder(...).mergeFrom(value); otherwise replaces. */ public Builder mergeTraceInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo value) { if (traceInfoBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && traceInfo_ != null && traceInfo_ != org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo.getDefaultInstance()) { traceInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo.newBuilder(traceInfo_).mergeFrom(value).buildPartial(); } else { traceInfo_ = value; } onChanged(); } else { traceInfoBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .hbase.pb.RPCTInfo trace_info = 2;</code> */ public Builder clearTraceInfo() { if (traceInfoBuilder_ == null) { traceInfo_ = null; onChanged(); } else { traceInfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } /** * <code>optional .hbase.pb.RPCTInfo trace_info = 2;</code> */ public 
org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo.Builder getTraceInfoBuilder() { bitField0_ |= 0x00000002; onChanged(); return getTraceInfoFieldBuilder().getBuilder(); } /** * <code>optional .hbase.pb.RPCTInfo trace_info = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfoOrBuilder getTraceInfoOrBuilder() { if (traceInfoBuilder_ != null) { return traceInfoBuilder_.getMessageOrBuilder(); } else { return traceInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo.getDefaultInstance() : traceInfo_; } } /** * <code>optional .hbase.pb.RPCTInfo trace_info = 2;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfoOrBuilder> getTraceInfoFieldBuilder() { if (traceInfoBuilder_ == null) { traceInfoBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfoOrBuilder>( getTraceInfo(), getParentForChildren(), isClean()); traceInfo_ = null; } return traceInfoBuilder_; } /* Field 3 (method_name): stored as String or ByteString, decoded lazily like the message-class accessor. */ private java.lang.Object methodName_ = ""; /** * <code>optional string method_name = 3;</code> */ public boolean hasMethodName() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional string method_name = 3;</code> */ public java.lang.String getMethodName() { java.lang.Object ref = methodName_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; 
/* Caches the decoded String back into methodName_ only when the bytes are valid UTF-8. */ java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { methodName_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>optional string method_name = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getMethodNameBytes() { java.lang.Object ref = methodName_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); methodName_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <code>optional string method_name = 3;</code> */ public Builder setMethodName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; methodName_ = value; onChanged(); return this; } /** * <code>optional string method_name = 3;</code> */ public Builder clearMethodName() { bitField0_ = (bitField0_ & ~0x00000004); methodName_ = getDefaultInstance().getMethodName(); onChanged(); return this; } /** * <code>optional string method_name = 3;</code> */ public Builder setMethodNameBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; methodName_ = value; onChanged(); return this; } private boolean requestParam_ ; /** * <pre> * If true, then a pb Message param follows. * </pre> * * <code>optional bool request_param = 4;</code> */ public boolean hasRequestParam() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <pre> * If true, then a pb Message param follows. * </pre> * * <code>optional bool request_param = 4;</code> */ public boolean getRequestParam() { return requestParam_; } /** * <pre> * If true, then a pb Message param follows. 
 * </pre> * * <code>optional bool request_param = 4;</code> */ public Builder setRequestParam(boolean value) { bitField0_ |= 0x00000008; requestParam_ = value; onChanged(); return this; } /** * <pre> * If true, then a pb Message param follows. * </pre> * * <code>optional bool request_param = 4;</code> */ public Builder clearRequestParam() { bitField0_ = (bitField0_ & ~0x00000008); requestParam_ = false; onChanged(); return this; } /* Field 5 (cell_block_meta): same lazy SingleFieldBuilderV3 pattern as trace_info above. */ private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta cellBlockMeta_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder> cellBlockMetaBuilder_; /** * <pre> * If present, then an encoded data block follows. * </pre> * * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 5;</code> */ public boolean hasCellBlockMeta() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <pre> * If present, then an encoded data block follows. * </pre> * * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta getCellBlockMeta() { if (cellBlockMetaBuilder_ == null) { return cellBlockMeta_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance() : cellBlockMeta_; } else { return cellBlockMetaBuilder_.getMessage(); } } /** * <pre> * If present, then an encoded data block follows. 
 * </pre> * * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 5;</code> */ public Builder setCellBlockMeta(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta value) { if (cellBlockMetaBuilder_ == null) { if (value == null) { throw new NullPointerException(); } cellBlockMeta_ = value; onChanged(); } else { cellBlockMetaBuilder_.setMessage(value); } bitField0_ |= 0x00000010; return this; } /** * <pre> * If present, then an encoded data block follows. * </pre> * * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 5;</code> */ public Builder setCellBlockMeta( org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.Builder builderForValue) { if (cellBlockMetaBuilder_ == null) { cellBlockMeta_ = builderForValue.build(); onChanged(); } else { cellBlockMetaBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000010; return this; } /** * <pre> * If present, then an encoded data block follows. * </pre> * * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 5;</code> */ /* merge: combines with an existing non-default value, otherwise replaces (mirrors mergeTraceInfo). */ public Builder mergeCellBlockMeta(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta value) { if (cellBlockMetaBuilder_ == null) { if (((bitField0_ & 0x00000010) == 0x00000010) && cellBlockMeta_ != null && cellBlockMeta_ != org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance()) { cellBlockMeta_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.newBuilder(cellBlockMeta_).mergeFrom(value).buildPartial(); } else { cellBlockMeta_ = value; } onChanged(); } else { cellBlockMetaBuilder_.mergeFrom(value); } bitField0_ |= 0x00000010; return this; } /** * <pre> * If present, then an encoded data block follows. 
* </pre> * * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 5;</code> */ public Builder clearCellBlockMeta() { if (cellBlockMetaBuilder_ == null) { cellBlockMeta_ = null; onChanged(); } else { cellBlockMetaBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000010); return this; } /** * <pre> * If present, then an encoded data block follows. * </pre> * * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.Builder getCellBlockMetaBuilder() { bitField0_ |= 0x00000010; onChanged(); return getCellBlockMetaFieldBuilder().getBuilder(); } /** * <pre> * If present, then an encoded data block follows. * </pre> * * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder getCellBlockMetaOrBuilder() { if (cellBlockMetaBuilder_ != null) { return cellBlockMetaBuilder_.getMessageOrBuilder(); } else { return cellBlockMeta_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance() : cellBlockMeta_; } } /** * <pre> * If present, then an encoded data block follows. 
* </pre> * * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 5;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder> getCellBlockMetaFieldBuilder() { if (cellBlockMetaBuilder_ == null) { cellBlockMetaBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder>( getCellBlockMeta(), getParentForChildren(), isClean()); cellBlockMeta_ = null; } return cellBlockMetaBuilder_; } private int priority_ ; /** * <pre> * 0 is NORMAL priority. 200 is HIGH. If no priority, treat it as NORMAL. * See HConstants. * </pre> * * <code>optional uint32 priority = 6;</code> */ public boolean hasPriority() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <pre> * 0 is NORMAL priority. 200 is HIGH. If no priority, treat it as NORMAL. * See HConstants. * </pre> * * <code>optional uint32 priority = 6;</code> */ public int getPriority() { return priority_; } /** * <pre> * 0 is NORMAL priority. 200 is HIGH. If no priority, treat it as NORMAL. * See HConstants. * </pre> * * <code>optional uint32 priority = 6;</code> */ public Builder setPriority(int value) { bitField0_ |= 0x00000020; priority_ = value; onChanged(); return this; } /** * <pre> * 0 is NORMAL priority. 200 is HIGH. If no priority, treat it as NORMAL. * See HConstants. 
* </pre> * * <code>optional uint32 priority = 6;</code> */ public Builder clearPriority() { bitField0_ = (bitField0_ & ~0x00000020); priority_ = 0; onChanged(); return this; } private int timeout_ ; /** * <code>optional uint32 timeout = 7;</code> */ public boolean hasTimeout() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** * <code>optional uint32 timeout = 7;</code> */ public int getTimeout() { return timeout_; } /** * <code>optional uint32 timeout = 7;</code> */ public Builder setTimeout(int value) { bitField0_ |= 0x00000040; timeout_ = value; onChanged(); return this; } /** * <code>optional uint32 timeout = 7;</code> */ public Builder clearTimeout() { bitField0_ = (bitField0_ & ~0x00000040); timeout_ = 0; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.RequestHeader) } // @@protoc_insertion_point(class_scope:hbase.pb.RequestHeader) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<RequestHeader> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<RequestHeader>() { public RequestHeader parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new RequestHeader(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<RequestHeader> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<RequestHeader> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ResponseHeaderOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.ResponseHeader) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>optional uint32 call_id = 1;</code> */ boolean hasCallId(); /** * <code>optional uint32 call_id = 1;</code> */ int getCallId(); /** * <pre> * If present, then request threw an exception and no response message (else we presume one) * </pre> * * <code>optional .hbase.pb.ExceptionResponse exception = 2;</code> */ boolean hasException(); /** * <pre> * If present, then request threw an exception and no response message (else we presume one) * </pre> * * <code>optional .hbase.pb.ExceptionResponse exception = 2;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse getException(); /** * <pre> * If present, then request threw an exception and no response message (else we presume one) * </pre> * * <code>optional .hbase.pb.ExceptionResponse exception = 2;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponseOrBuilder getExceptionOrBuilder(); /** * <pre> * If present, then an encoded data block follows. * </pre> * * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 3;</code> */ boolean hasCellBlockMeta(); /** * <pre> * If present, then an encoded data block follows. 
* </pre> * * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 3;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta getCellBlockMeta(); /** * <pre> * If present, then an encoded data block follows. * </pre> * * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 3;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder getCellBlockMetaOrBuilder(); } /** * Protobuf type {@code hbase.pb.ResponseHeader} */ public static final class ResponseHeader extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.ResponseHeader) ResponseHeaderOrBuilder { // Use ResponseHeader.newBuilder() to construct. private ResponseHeader(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ResponseHeader() { callId_ = 0; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ResponseHeader( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { bitField0_ |= 0x00000001; callId_ = input.readUInt32(); break; } case 18: { org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse.Builder subBuilder = null; if 
(((bitField0_ & 0x00000002) == 0x00000002)) { subBuilder = exception_.toBuilder(); } exception_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(exception_); exception_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } case 26: { org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.Builder subBuilder = null; if (((bitField0_ & 0x00000004) == 0x00000004)) { subBuilder = cellBlockMeta_.toBuilder(); } cellBlockMeta_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(cellBlockMeta_); cellBlockMeta_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000004; break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_ResponseHeader_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_ResponseHeader_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader.Builder.class); } private int bitField0_; public static final int CALL_ID_FIELD_NUMBER = 
1; private int callId_; /** * <code>optional uint32 call_id = 1;</code> */ public boolean hasCallId() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional uint32 call_id = 1;</code> */ public int getCallId() { return callId_; } public static final int EXCEPTION_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse exception_; /** * <pre> * If present, then request threw an exception and no response message (else we presume one) * </pre> * * <code>optional .hbase.pb.ExceptionResponse exception = 2;</code> */ public boolean hasException() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <pre> * If present, then request threw an exception and no response message (else we presume one) * </pre> * * <code>optional .hbase.pb.ExceptionResponse exception = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse getException() { return exception_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance() : exception_; } /** * <pre> * If present, then request threw an exception and no response message (else we presume one) * </pre> * * <code>optional .hbase.pb.ExceptionResponse exception = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponseOrBuilder getExceptionOrBuilder() { return exception_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance() : exception_; } public static final int CELL_BLOCK_META_FIELD_NUMBER = 3; private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta cellBlockMeta_; /** * <pre> * If present, then an encoded data block follows. * </pre> * * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 3;</code> */ public boolean hasCellBlockMeta() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <pre> * If present, then an encoded data block follows. 
* </pre> * * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta getCellBlockMeta() { return cellBlockMeta_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance() : cellBlockMeta_; } /** * <pre> * If present, then an encoded data block follows. * </pre> * * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder getCellBlockMetaOrBuilder() { return cellBlockMeta_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance() : cellBlockMeta_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt32(1, callId_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeMessage(2, getException()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeMessage(3, getCellBlockMeta()); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt32Size(1, callId_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(2, getException()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(3, 
getCellBlockMeta()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader other = (org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader) obj; boolean result = true; result = result && (hasCallId() == other.hasCallId()); if (hasCallId()) { result = result && (getCallId() == other.getCallId()); } result = result && (hasException() == other.hasException()); if (hasException()) { result = result && getException() .equals(other.getException()); } result = result && (hasCellBlockMeta() == other.hasCellBlockMeta()); if (hasCellBlockMeta()) { result = result && getCellBlockMeta() .equals(other.getCellBlockMeta()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasCallId()) { hash = (37 * hash) + CALL_ID_FIELD_NUMBER; hash = (53 * hash) + getCallId(); } if (hasException()) { hash = (37 * hash) + EXCEPTION_FIELD_NUMBER; hash = (53 * hash) + getException().hashCode(); } if (hasCellBlockMeta()) { hash = (37 * hash) + CELL_BLOCK_META_FIELD_NUMBER; hash = (53 * hash) + getCellBlockMeta().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public 
static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.ResponseHeader} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.ResponseHeader) org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeaderOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_ResponseHeader_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_ResponseHeader_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getExceptionFieldBuilder(); getCellBlockMetaFieldBuilder(); } } public Builder clear() { super.clear(); callId_ = 0; bitField0_ = (bitField0_ & ~0x00000001); if (exceptionBuilder_ == null) { exception_ = null; } 
else { exceptionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); if (cellBlockMetaBuilder_ == null) { cellBlockMeta_ = null; } else { cellBlockMetaBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000004); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_ResponseHeader_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader build() { org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader result = new org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.callId_ = callId_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } if (exceptionBuilder_ == null) { result.exception_ = exception_; } else { result.exception_ = exceptionBuilder_.build(); } if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } if (cellBlockMetaBuilder_ == null) { result.cellBlockMeta_ = cellBlockMeta_; } else { result.cellBlockMeta_ = cellBlockMetaBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader.getDefaultInstance()) return this; if (other.hasCallId()) { setCallId(other.getCallId()); } if (other.hasException()) { mergeException(other.getException()); } if (other.hasCellBlockMeta()) { mergeCellBlockMeta(other.getCellBlockMeta()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException 
// NOTE(review): machine-generated protobuf code ("DO NOT EDIT!" per file header).
// This fragment is the tail of hbase.pb.ResponseHeader's Builder: the end of
// mergeFrom(CodedInputStream, ...) — parse-partial, keep the unfinished message
// via mergeFrom in the finally block even when parsing fails, rethrow the
// unwrapped IOException — followed by accessors for call_id (presence bit
// 0x00000001), exception (bit 0x00000002) and cell_block_meta (bit 0x00000004).
// The two message fields are backed either by the plain field or, once a nested
// builder has been requested, by a lazily created SingleFieldBuilderV3.
// Code below is preserved token-for-token; do not hand-modify — regenerate
// from RPC.proto with protoc instead.
{ org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private int callId_ ; /** * <code>optional uint32 call_id = 1;</code> */ public boolean hasCallId() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional uint32 call_id = 1;</code> */ public int getCallId() { return callId_; } /** * <code>optional uint32 call_id = 1;</code> */ public Builder setCallId(int value) { bitField0_ |= 0x00000001; callId_ = value; onChanged(); return this; } /** * <code>optional uint32 call_id = 1;</code> */ public Builder clearCallId() { bitField0_ = (bitField0_ & ~0x00000001); callId_ = 0; onChanged(); return this; } private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse exception_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponseOrBuilder> exceptionBuilder_; /** * <pre> * If present, then request threw an exception and no response message (else we presume one) * </pre> * * <code>optional .hbase.pb.ExceptionResponse exception = 2;</code> */ public boolean hasException() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <pre> * If present, then request threw an exception and no response message (else we presume one) * </pre> * * <code>optional .hbase.pb.ExceptionResponse exception = 
2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse getException() { if (exceptionBuilder_ == null) { return exception_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance() : exception_; } else { return exceptionBuilder_.getMessage(); } } /** * <pre> * If present, then request threw an exception and no response message (else we presume one) * </pre> * * <code>optional .hbase.pb.ExceptionResponse exception = 2;</code> */ public Builder setException(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse value) { if (exceptionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } exception_ = value; onChanged(); } else { exceptionBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } /** * <pre> * If present, then request threw an exception and no response message (else we presume one) * </pre> * * <code>optional .hbase.pb.ExceptionResponse exception = 2;</code> */ public Builder setException( org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse.Builder builderForValue) { if (exceptionBuilder_ == null) { exception_ = builderForValue.build(); onChanged(); } else { exceptionBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } /** * <pre> * If present, then request threw an exception and no response message (else we presume one) * </pre> * * <code>optional .hbase.pb.ExceptionResponse exception = 2;</code> */ public Builder mergeException(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse value) { if (exceptionBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && exception_ != null && exception_ != org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance()) { exception_ = 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse.newBuilder(exception_).mergeFrom(value).buildPartial(); } else { exception_ = value; } onChanged(); } else { exceptionBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } /** * <pre> * If present, then request threw an exception and no response message (else we presume one) * </pre> * * <code>optional .hbase.pb.ExceptionResponse exception = 2;</code> */ public Builder clearException() { if (exceptionBuilder_ == null) { exception_ = null; onChanged(); } else { exceptionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } /** * <pre> * If present, then request threw an exception and no response message (else we presume one) * </pre> * * <code>optional .hbase.pb.ExceptionResponse exception = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse.Builder getExceptionBuilder() { bitField0_ |= 0x00000002; onChanged(); return getExceptionFieldBuilder().getBuilder(); } /** * <pre> * If present, then request threw an exception and no response message (else we presume one) * </pre> * * <code>optional .hbase.pb.ExceptionResponse exception = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponseOrBuilder getExceptionOrBuilder() { if (exceptionBuilder_ != null) { return exceptionBuilder_.getMessageOrBuilder(); } else { return exception_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance() : exception_; } } /** * <pre> * If present, then request threw an exception and no response message (else we presume one) * </pre> * * <code>optional .hbase.pb.ExceptionResponse exception = 2;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponseOrBuilder> getExceptionFieldBuilder() { if (exceptionBuilder_ == null) { exceptionBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponseOrBuilder>( getException(), getParentForChildren(), isClean()); exception_ = null; } return exceptionBuilder_; } private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta cellBlockMeta_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder> cellBlockMetaBuilder_; /** * <pre> * If present, then an encoded data block follows. * </pre> * * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 3;</code> */ public boolean hasCellBlockMeta() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <pre> * If present, then an encoded data block follows. 
* </pre> * * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta getCellBlockMeta() { if (cellBlockMetaBuilder_ == null) { return cellBlockMeta_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance() : cellBlockMeta_; } else { return cellBlockMetaBuilder_.getMessage(); } } /** * <pre> * If present, then an encoded data block follows. * </pre> * * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 3;</code> */ public Builder setCellBlockMeta(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta value) { if (cellBlockMetaBuilder_ == null) { if (value == null) { throw new NullPointerException(); } cellBlockMeta_ = value; onChanged(); } else { cellBlockMetaBuilder_.setMessage(value); } bitField0_ |= 0x00000004; return this; } /** * <pre> * If present, then an encoded data block follows. * </pre> * * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 3;</code> */ public Builder setCellBlockMeta( org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.Builder builderForValue) { if (cellBlockMetaBuilder_ == null) { cellBlockMeta_ = builderForValue.build(); onChanged(); } else { cellBlockMetaBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; return this; } /** * <pre> * If present, then an encoded data block follows. 
* </pre> * * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 3;</code> */ public Builder mergeCellBlockMeta(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta value) { if (cellBlockMetaBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004) && cellBlockMeta_ != null && cellBlockMeta_ != org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance()) { cellBlockMeta_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.newBuilder(cellBlockMeta_).mergeFrom(value).buildPartial(); } else { cellBlockMeta_ = value; } onChanged(); } else { cellBlockMetaBuilder_.mergeFrom(value); } bitField0_ |= 0x00000004; return this; } /** * <pre> * If present, then an encoded data block follows. * </pre> * * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 3;</code> */ public Builder clearCellBlockMeta() { if (cellBlockMetaBuilder_ == null) { cellBlockMeta_ = null; onChanged(); } else { cellBlockMetaBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000004); return this; } /** * <pre> * If present, then an encoded data block follows. * </pre> * * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.Builder getCellBlockMetaBuilder() { bitField0_ |= 0x00000004; onChanged(); return getCellBlockMetaFieldBuilder().getBuilder(); } /** * <pre> * If present, then an encoded data block follows. * </pre> * * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder getCellBlockMetaOrBuilder() { if (cellBlockMetaBuilder_ != null) { return cellBlockMetaBuilder_.getMessageOrBuilder(); } else { return cellBlockMeta_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance() : cellBlockMeta_; } } /** * <pre> * If present, then an encoded data block follows. * </pre> * * <code>optional .hbase.pb.CellBlockMeta cell_block_meta = 3;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder> getCellBlockMetaFieldBuilder() { if (cellBlockMetaBuilder_ == null) { cellBlockMetaBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder>( getCellBlockMeta(), getParentForChildren(), isClean()); cellBlockMeta_ = null; } return cellBlockMetaBuilder_; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.ResponseHeader)
} // @@protoc_insertion_point(class_scope:hbase.pb.ResponseHeader)
private static final org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public 
// NOTE(review): machine-generated protobuf code ("DO NOT EDIT!" per file header).
// This fragment completes ResponseHeader: its deprecated public PARSER (the
// "@java.lang.Deprecated public" modifiers immediately precede this block), the
// parser()/getParserForType()/getDefaultInstanceForType() accessors, then the
// outer class's Descriptor/FieldAccessorTable fields for every message in
// RPC.proto, and the static initializer that builds the FileDescriptor from the
// embedded serialized descriptor (descriptorData) with Tracing.proto and
// HBase.proto as dependencies. Reproduced token-for-token; regenerate from
// RPC.proto with protoc rather than editing by hand.
static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ResponseHeader> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<ResponseHeader>() { public ResponseHeader parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new ResponseHeader(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ResponseHeader> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ResponseHeader> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_UserInformation_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_UserInformation_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ConnectionHeader_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ConnectionHeader_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ConnectionHeaderResponse_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ConnectionHeaderResponse_fieldAccessorTable; private static final 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_CellBlockMeta_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_CellBlockMeta_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ExceptionResponse_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ExceptionResponse_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_CryptoCipherMeta_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_CryptoCipherMeta_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RequestHeader_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_RequestHeader_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ResponseHeader_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ResponseHeader_fieldAccessorTable; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n\tRPC.proto\022\010hbase.pb\032\rTracing.proto\032\013HB" + "ase.proto\"<\n\017UserInformation\022\026\n\016effectiv" + "e_user\030\001 \002(\t\022\021\n\treal_user\030\002 \001(\t\"\362\001\n\020Conn" + "ectionHeader\022,\n\tuser_info\030\001 \001(\0132\031.hbase." + "pb.UserInformation\022\024\n\014service_name\030\002 \001(\t" + "\022\036\n\026cell_block_codec_class\030\003 \001(\t\022#\n\033cell" + "_block_compressor_class\030\004 \001(\t\022+\n\014version" + "_info\030\005 \001(\0132\025.hbase.pb.VersionInfo\022(\n rp" + "c_crypto_cipher_transformation\030\006 \001(\t\"R\n\030" + "ConnectionHeaderResponse\0226\n\022crypto_ciphe", "r_meta\030\001 \001(\0132\032.hbase.pb.CryptoCipherMeta" + "\"\037\n\rCellBlockMeta\022\016\n\006length\030\001 \001(\r\"|\n\021Exc" + "eptionResponse\022\034\n\024exception_class_name\030\001" + " \001(\t\022\023\n\013stack_trace\030\002 \001(\t\022\020\n\010hostname\030\003 " + "\001(\t\022\014\n\004port\030\004 \001(\005\022\024\n\014do_not_retry\030\005 \001(\010\"" + "f\n\020CryptoCipherMeta\022\026\n\016transformation\030\001 " + "\002(\t\022\r\n\005inKey\030\002 \001(\014\022\014\n\004inIv\030\003 \001(\014\022\016\n\006outK" + "ey\030\004 \001(\014\022\r\n\005outIv\030\005 \001(\014\"\311\001\n\rRequestHeade" + "r\022\017\n\007call_id\030\001 \001(\r\022&\n\ntrace_info\030\002 \001(\0132\022" + ".hbase.pb.RPCTInfo\022\023\n\013method_name\030\003 \001(\t\022", "\025\n\rrequest_param\030\004 \001(\010\0220\n\017cell_block_met" + "a\030\005 \001(\0132\027.hbase.pb.CellBlockMeta\022\020\n\010prio" + "rity\030\006 \001(\r\022\017\n\007timeout\030\007 \001(\r\"\203\001\n\016Response" + "Header\022\017\n\007call_id\030\001 \001(\r\022.\n\texception\030\002 \001" + "(\0132\033.hbase.pb.ExceptionResponse\0220\n\017cell_" + "block_meta\030\003 \001(\0132\027.hbase.pb.CellBlockMet" + "aBC\n1org.apache.hadoop.hbase.shaded.prot" + "obuf.generatedB\tRPCProtosH\001\240\001\001" }; org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { public org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry assignDescriptors( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor root) { descriptor = root; return null; } }; org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor[] { org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.getDescriptor(), org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor(), }, assigner); internal_static_hbase_pb_UserInformation_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_hbase_pb_UserInformation_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_UserInformation_descriptor, new java.lang.String[] { "EffectiveUser", "RealUser", }); internal_static_hbase_pb_ConnectionHeader_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_hbase_pb_ConnectionHeader_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_ConnectionHeader_descriptor, new java.lang.String[] { "UserInfo", "ServiceName", "CellBlockCodecClass", "CellBlockCompressorClass", "VersionInfo", "RpcCryptoCipherTransformation", }); internal_static_hbase_pb_ConnectionHeaderResponse_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_hbase_pb_ConnectionHeaderResponse_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_ConnectionHeaderResponse_descriptor, new java.lang.String[] { "CryptoCipherMeta", }); internal_static_hbase_pb_CellBlockMeta_descriptor = 
getDescriptor().getMessageTypes().get(3); internal_static_hbase_pb_CellBlockMeta_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_CellBlockMeta_descriptor, new java.lang.String[] { "Length", }); internal_static_hbase_pb_ExceptionResponse_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_hbase_pb_ExceptionResponse_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_ExceptionResponse_descriptor, new java.lang.String[] { "ExceptionClassName", "StackTrace", "Hostname", "Port", "DoNotRetry", }); internal_static_hbase_pb_CryptoCipherMeta_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_hbase_pb_CryptoCipherMeta_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_CryptoCipherMeta_descriptor, new java.lang.String[] { "Transformation", "InKey", "InIv", "OutKey", "OutIv", }); internal_static_hbase_pb_RequestHeader_descriptor = getDescriptor().getMessageTypes().get(6); internal_static_hbase_pb_RequestHeader_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_RequestHeader_descriptor, new java.lang.String[] { "CallId", "TraceInfo", "MethodName", "RequestParam", "CellBlockMeta", "Priority", "Timeout", }); internal_static_hbase_pb_ResponseHeader_descriptor = getDescriptor().getMessageTypes().get(7); internal_static_hbase_pb_ResponseHeader_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_ResponseHeader_descriptor, new java.lang.String[] { "CallId", "Exception", "CellBlockMeta", }); org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.getDescriptor(); 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope)
}