// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: Client.proto
// NOTE(review): protoc-generated, shaded protobuf bindings. Do not hand-edit; fix Client.proto and regenerate.
package org.apache.hadoop.hbase.shaded.protobuf.generated;
public final class ClientProtos {
private ClientProtos() {}
// No extensions are declared in this file, so the Lite registration is a no-op.
public static void registerAllExtensions( org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite registry) { }
public static void registerAllExtensions( org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry registry) { registerAllExtensions( (org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite) registry); }
/** * <pre> ** * Consistency defines the expected consistency level for an operation. * </pre> * * Protobuf enum {@code hbase.pb.Consistency} */
// NOTE(review): wire enum. forNumber(int) returns null for unrecognized numbers — callers must null-check;
// the descriptor-based valueOf(EnumValueDescriptor) throws IllegalArgumentException for a foreign descriptor.
public enum Consistency implements org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolMessageEnum { /** * <code>STRONG = 0;</code> */ STRONG(0), /** * <code>TIMELINE = 1;</code> */ TIMELINE(1), ; /** * <code>STRONG = 0;</code> */ public static final int STRONG_VALUE = 0; /** * <code>TIMELINE = 1;</code> */ public static final int TIMELINE_VALUE = 1; public final int getNumber() { return value; } /** * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static Consistency valueOf(int value) { return forNumber(value); }
public static Consistency forNumber(int value) { switch (value) { case 0: return STRONG; case 1: return TIMELINE; default: return null; } }
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<Consistency> internalGetValueMap() { return internalValueMap; }
private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap< Consistency> internalValueMap = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<Consistency>() { public Consistency findValueByNumber(int number) { return Consistency.forNumber(number); } };
public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); }
public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); }
public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.getDescriptor().getEnumTypes().get(0); }
private static final Consistency[] VALUES = values();
public static Consistency valueOf( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } return VALUES[desc.getIndex()]; }
private final int value;
private Consistency(int value) { this.value = value; }
// @@protoc_insertion_point(enum_scope:hbase.pb.Consistency)
}
// Accessor interface for hbase.pb.Authorizations (repeated string field "label = 1").
public interface AuthorizationsOrBuilder extends
// @@protoc_insertion_point(interface_extends:hbase.pb.Authorizations)
org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
/** * <code>repeated string label = 1;</code> */
java.util.List<java.lang.String> getLabelList();
/** * <code>repeated string label = 1;</code> */ int getLabelCount();
/** * <code>repeated string label = 1;</code> */ java.lang.String getLabel(int index);
/** * <code>repeated string label = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getLabelBytes(int index);
}
/** * <pre> ** * The protocol buffer version of Authorizations. * </pre> * * Protobuf type {@code hbase.pb.Authorizations} */
// NOTE(review): protoc-generated message with a single repeated string field (label = 1). Do not hand-edit.
// The parsing constructor accumulates labels into a LazyStringArrayList and freezes it in the finally block.
public static final class Authorizations extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hbase.pb.Authorizations)
AuthorizationsOrBuilder {
// Use Authorizations.newBuilder() to construct.
private Authorizations(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); }
private Authorizations() { label_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY; }
@java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; }
// Wire parser: tag 10 = field 1 (length-delimited string); anything else goes to unknownFields.
private Authorizations( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { label_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList(); mutable_bitField0_ |= 0x00000001; } label_.add(bs); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { label_ = label_.getUnmodifiableView(); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } }
public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Authorizations_descriptor; }
protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Authorizations_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations.Builder.class); }
public static final int LABEL_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringList label_;
/** * <code>repeated string label = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolStringList getLabelList() { return label_; }
/** * <code>repeated string label = 1;</code> */ public int getLabelCount() { return label_.size(); }
/** * <code>repeated string label = 1;</code> */ public java.lang.String getLabel(int index) { return label_.get(index); }
/** * <code>repeated string label = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getLabelBytes(int index) { return label_.getByteString(index); }
private byte memoizedIsInitialized = -1;
// No required fields, so initialization is trivially true (memoized).
public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; }
public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < label_.size(); i++) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, label_.getRaw(i)); } unknownFields.writeTo(output); }
public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; { int dataSize = 0; for (int i = 0; i < label_.size(); i++) { dataSize += computeStringSizeNoTag(label_.getRaw(i)); } size += dataSize; size += 1 * getLabelList().size(); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; }
private static final long serialVersionUID = 0L;
@java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations) obj; boolean result = true; result = result && getLabelList() .equals(other.getLabelList()); result = result && unknownFields.equals(other.unknownFields); return result; }
@java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getLabelCount() > 0) { hash = (37 * hash) + LABEL_FIELD_NUMBER; hash = (53 * hash) + getLabelList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; }
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); }
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); }
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); }
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); }
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); }
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); }
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); }
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); }
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); }
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); }
public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); }
@java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; }
/** * <pre> ** * The protocol buffer version of Authorizations. * </pre> * * Protobuf type {@code hbase.pb.Authorizations} */
// Mutable builder. bitField0_ bit 0 tracks whether label_ is a private mutable copy (see ensureLabelIsMutable()).
public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hbase.pb.Authorizations)
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.AuthorizationsOrBuilder {
public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Authorizations_descriptor; }
protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Authorizations_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations.Builder.class); }
// Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations.newBuilder()
private Builder() { maybeForceBuilderInitialization(); }
private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); }
private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } }
public Builder clear() { super.clear(); label_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); return this; }
public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Authorizations_descriptor; }
public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations.getDefaultInstance(); }
public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; }
// buildPartial hands the (frozen) label_ list to the message and clears the mutable bit, so further builder use re-copies.
public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations(this); int from_bitField0_ = bitField0_; if (((bitField0_ & 0x00000001) == 0x00000001)) { label_ = label_.getUnmodifiableView(); bitField0_ = (bitField0_ & ~0x00000001); } result.label_ = label_; onBuilt(); return result; }
public Builder clone() { return (Builder) super.clone(); }
public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); }
public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); }
public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); }
public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); }
public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); }
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations)other); } else { super.mergeFrom(other); return this; } }
// Merge aliases other.label_ directly when this builder is empty (safe: message lists are unmodifiable views).
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations.getDefaultInstance()) return this; if (!other.label_.isEmpty()) { if (label_.isEmpty()) { label_ = other.label_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureLabelIsMutable(); label_.addAll(other.label_); } onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; }
public final boolean isInitialized() { return true; }
public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; }
private int bitField0_;
private org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringList label_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY;
private void ensureLabelIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { label_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList(label_); bitField0_ |= 0x00000001; } }
/** * <code>repeated string label = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolStringList getLabelList() { return label_.getUnmodifiableView(); }
/** * <code>repeated string label = 1;</code> */ public int getLabelCount() { return label_.size(); }
/** * <code>repeated string label = 1;</code> */ public java.lang.String getLabel(int index) { return label_.get(index); }
/** * <code>repeated string label = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getLabelBytes(int index) { return label_.getByteString(index); }
/** * <code>repeated string label = 1;</code> */ public Builder setLabel( int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureLabelIsMutable(); label_.set(index, value); onChanged(); return this; }
/** * <code>repeated string label = 1;</code> */ public Builder addLabel( java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureLabelIsMutable(); label_.add(value); onChanged(); return this; }
/** * <code>repeated string label = 1;</code> */ public Builder addAllLabel( java.lang.Iterable<java.lang.String> values) { ensureLabelIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, label_); onChanged(); return this; }
/** * <code>repeated string label = 1;</code> */ public Builder clearLabel() { label_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; }
/** * <code>repeated string label = 1;</code> */ public Builder addLabelBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } ensureLabelIsMutable(); label_.add(value); onChanged(); return this; }
public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); }
public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); }
// @@protoc_insertion_point(builder_scope:hbase.pb.Authorizations)
}
// @@protoc_insertion_point(class_scope:hbase.pb.Authorizations)
private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations DEFAULT_INSTANCE;
static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations(); }
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations getDefaultInstance() { return DEFAULT_INSTANCE; }
// PARSER is deprecated-but-public for compatibility; parser() is the supported accessor.
@java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Authorizations> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<Authorizations>() { public Authorizations parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new Authorizations(input, extensionRegistry); } };
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Authorizations> parser() { return PARSER; }
@java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Authorizations> getParserForType() { return PARSER; }
public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations getDefaultInstanceForType() { return DEFAULT_INSTANCE; }
}
// Accessor interface for hbase.pb.CellVisibility (required string field "expression = 1").
public interface CellVisibilityOrBuilder extends
// @@protoc_insertion_point(interface_extends:hbase.pb.CellVisibility)
org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>required string expression = 1;</code> */ boolean hasExpression(); /** * <code>required string expression = 1;</code> */ java.lang.String getExpression(); /** * <code>required string expression = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getExpressionBytes(); }
/** * <pre> ** * The protocol buffer version of CellVisibility. * </pre> * * Protobuf type {@code hbase.pb.CellVisibility} */
public static final class CellVisibility extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hbase.pb.CellVisibility)
CellVisibilityOrBuilder {
// Use CellVisibility.newBuilder() to construct.
private CellVisibility(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CellVisibility() { expression_ = ""; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private CellVisibility( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; expression_ = bs; break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CellVisibility_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CellVisibility_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility.Builder.class); } private int bitField0_; public static final int EXPRESSION_FIELD_NUMBER = 1; private volatile java.lang.Object expression_; /** * <code>required string expression = 1;</code> */ public boolean hasExpression() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string expression = 1;</code> */ public java.lang.String getExpression() { java.lang.Object ref = expression_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { expression_ = s; } return s; } } /** * <code>required string expression = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getExpressionBytes() { java.lang.Object ref = expression_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); expression_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasExpression()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ 
& 0x00000001) == 0x00000001)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, expression_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(1, expression_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility) obj; boolean result = true; result = result && (hasExpression() == other.hasExpression()); if (hasExpression()) { result = result && getExpression() .equals(other.getExpression()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasExpression()) { hash = (37 * hash) + EXPRESSION_FIELD_NUMBER; hash = (53 * hash) + getExpression().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility parseFrom( 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility parseDelimitedFrom( java.io.InputStream input, 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> ** * The protocol buffer version of CellVisibility. 
 * </pre> * * Protobuf type {@code hbase.pb.CellVisibility} */
// Builder for hbase.pb.CellVisibility. Presence of the single required `expression` field is
// tracked in bit 0x1 of bitField0_; expression_ holds either a String or a ByteString and is
// lazily converted in the accessors (the standard generated-protobuf string caching scheme).
public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.CellVisibility) org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibilityOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CellVisibility_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CellVisibility_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); expression_ = ""; bitField0_ = (bitField0_ & ~0x00000001); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CellVisibility_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility 
getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.expression_ = expression_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { 
// mergeFrom(Message/CellVisibility) copies `expression` only when set on the source;
// isInitialized() enforces the required field; mergeFrom(CodedInputStream) re-merges any
// partially parsed message in the finally block even when parsing throws.
if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility.getDefaultInstance()) return this; if (other.hasExpression()) { bitField0_ |= 0x00000001; expression_ = other.expression_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasExpression()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object expression_ = ""; /** * <code>required string expression = 1;</code> */ public boolean hasExpression() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string expression = 1;</code> */ public java.lang.String getExpression() { java.lang.Object ref = expression_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = 
(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { expression_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>required string expression = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getExpressionBytes() { java.lang.Object ref = expression_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); expression_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <code>required string expression = 1;</code> */ public Builder setExpression( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; expression_ = value; onChanged(); return this; } /** * <code>required string expression = 1;</code> */ public Builder clearExpression() { bitField0_ = (bitField0_ & ~0x00000001); expression_ = getDefaultInstance().getExpression(); onChanged(); return this; } /** * <code>required string expression = 1;</code> */ public Builder setExpressionBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; expression_ = value; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.CellVisibility) } // @@protoc_insertion_point(class_scope:hbase.pb.CellVisibility) private static final 
// CellVisibility singleton DEFAULT_INSTANCE and its PARSER (public but deprecated in favor of
// parser()); followed by the start of the ColumnOrBuilder accessor interface
// (required bytes family = 1, repeated bytes qualifier = 2).
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<CellVisibility> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<CellVisibility>() { public CellVisibility parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new CellVisibility(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<CellVisibility> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<CellVisibility> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ColumnOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.Column) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>required bytes family = 1;</code> */ boolean hasFamily(); /** * <code>required bytes family = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getFamily(); /** * <code>repeated bytes qualifier = 2;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString> getQualifierList(); /** * <code>repeated bytes qualifier = 2;</code> */ int getQualifierCount(); /** * <code>repeated bytes qualifier = 2;</code> */ 
// hbase.pb.Column message: a required `family` (bytes) plus a repeated `qualifier` bytes list.
// The CodedInputStream constructor loops on wire tags (10 => family, 18 => qualifier) and
// collects unrecognized fields; note the protoc-style switch where `default` precedes the
// known cases (switch-on-tag, so ordering does not affect dispatch).
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getQualifier(int index); } /** * <pre> ** * Container for a list of column qualifier names of a family. * </pre> * * Protobuf type {@code hbase.pb.Column} */ public static final class Column extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.Column) ColumnOrBuilder { // Use Column.newBuilder() to construct. private Column(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private Column() { family_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; qualifier_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private Column( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; family_ = input.readBytes(); break; } case 18: { if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { qualifier_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString>(); mutable_bitField0_ |= 0x00000002; } qualifier_.add(input.readBytes()); break; } } } } catch 
// Parse failures attach the partially built message; the finally block freezes the qualifier
// list (unmodifiableList) and the collected unknown fields.
(org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { qualifier_ = java.util.Collections.unmodifiableList(qualifier_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Column_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Column_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.Builder.class); } private int bitField0_; public static final int FAMILY_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString family_; /** * <code>required bytes family = 1;</code> */ public boolean hasFamily() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes family = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getFamily() { return family_; } public static final int QUALIFIER_FIELD_NUMBER = 2; private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString> qualifier_; /** * <code>repeated bytes qualifier = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString> getQualifierList() { return qualifier_; } /** * <code>repeated bytes qualifier = 2;</code> */ public int 
// Serialization: writeTo emits family (field 1) then each qualifier (field 2), then unknown
// fields; getSerializedSize memoizes the computed size in memoizedSize (-1 = not yet computed,
// same convention as memoizedIsInitialized above).
getQualifierCount() { return qualifier_.size(); } /** * <code>repeated bytes qualifier = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getQualifier(int index) { return qualifier_.get(index); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasFamily()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, family_); } for (int i = 0; i < qualifier_.size(); i++) { output.writeBytes(2, qualifier_.get(i)); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(1, family_); } { int dataSize = 0; for (int i = 0; i < qualifier_.size(); i++) { dataSize += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSizeNoTag(qualifier_.get(i)); } size += dataSize; size += 1 * getQualifierList().size(); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column) obj; boolean result = true; result = result && (hasFamily() == other.hasFamily()); if (hasFamily()) { result = result && 
// equals/hashCode follow the generated pattern (descriptor hash + each set field + unknown
// fields, memoized in memoizedHashCode); the static parseFrom overloads below mirror those of
// CellVisibility, ending with the Builder factory methods and the Column.Builder class start.
getFamily() .equals(other.getFamily()); } result = result && getQualifierList() .equals(other.getQualifierList()); result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasFamily()) { hash = (37 * hash) + FAMILY_FIELD_NUMBER; hash = (53 * hash) + getFamily().hashCode(); } if (getQualifierCount() > 0) { hash = (37 * hash) + QUALIFIER_FIELD_NUMBER; hash = (53 * hash) + getQualifierList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 
.parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> ** * Container for a list of column qualifier names of a family. * </pre> * * Protobuf type {@code hbase.pb.Column} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.Column) org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ColumnOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Column_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Column_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( 
// Column.Builder: `family` presence is tracked in bit 0x1 of bitField0_, mutability of the
// qualifier list in bit 0x2; buildPartial() freezes the list with Collections.unmodifiableList
// before handing it to the message, and ensureQualifierIsMutable() copies it back to an
// ArrayList on first mutation after a freeze.
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); family_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); qualifier_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Column_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.family_ = family_; if (((bitField0_ & 0x00000002) == 0x00000002)) { qualifier_ = java.util.Collections.unmodifiableList(qualifier_); bitField0_ = (bitField0_ & ~0x00000002); } result.qualifier_ = qualifier_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) 
super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.getDefaultInstance()) return this; if (other.hasFamily()) { setFamily(other.getFamily()); } if (!other.qualifier_.isEmpty()) { if (qualifier_.isEmpty()) { qualifier_ = other.qualifier_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureQualifierIsMutable(); qualifier_.addAll(other.qualifier_); } onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasFamily()) { return false; } return true; } public Builder mergeFrom( 
// Stream merge: any partially parsed Column is merged back in the finally block even when
// an InvalidProtocolBufferException is rethrown (unwrapIOException).
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString family_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes family = 1;</code> */ public boolean hasFamily() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes family = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getFamily() { return family_; } /** * <code>required bytes family = 1;</code> */ public Builder setFamily(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; family_ = value; onChanged(); return this; } /** * <code>required bytes family = 1;</code> */ public Builder clearFamily() { bitField0_ = (bitField0_ & ~0x00000001); family_ = getDefaultInstance().getFamily(); onChanged(); return this; } private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString> qualifier_ = java.util.Collections.emptyList(); private void ensureQualifierIsMutable() { if (!((bitField0_ & 0x00000002) == 0x00000002)) { qualifier_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString>(qualifier_); bitField0_ |= 0x00000002; } } /** * <code>repeated 
bytes qualifier = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString> getQualifierList() { return java.util.Collections.unmodifiableList(qualifier_); } /** * <code>repeated bytes qualifier = 2;</code> */ public int getQualifierCount() { return qualifier_.size(); } /** * <code>repeated bytes qualifier = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getQualifier(int index) { return qualifier_.get(index); } /** * <code>repeated bytes qualifier = 2;</code> */ public Builder setQualifier( int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } ensureQualifierIsMutable(); qualifier_.set(index, value); onChanged(); return this; } /** * <code>repeated bytes qualifier = 2;</code> */ public Builder addQualifier(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } ensureQualifierIsMutable(); qualifier_.add(value); onChanged(); return this; } /** * <code>repeated bytes qualifier = 2;</code> */ public Builder addAllQualifier( java.lang.Iterable<? 
extends org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString> values) { ensureQualifierIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, qualifier_); onChanged(); return this; } /** * <code>repeated bytes qualifier = 2;</code> */ public Builder clearQualifier() { qualifier_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.Column) } // @@protoc_insertion_point(class_scope:hbase.pb.Column) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Column> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<Column>() { public Column parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new Column(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Column> parser() { return PARSER; } @java.lang.Override public 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Column> getParserForType() {
  return PARSER;
}

public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}

}

// NOTE(review): protoc-generated read-side contract for an hbase.pb.Get message,
// implemented by both Get and Get.Builder. Do not hand-edit; regenerate from
// Client.proto if the message definition changes.
public interface GetOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hbase.pb.Get)
    org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {

  /**
   * <code>required bytes row = 1;</code>
   */
  boolean hasRow();
  /**
   * <code>required bytes row = 1;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getRow();

  /**
   * <code>repeated .hbase.pb.Column column = 2;</code>
   */
  java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column>
      getColumnList();
  /**
   * <code>repeated .hbase.pb.Column column = 2;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column getColumn(int index);
  /**
   * <code>repeated .hbase.pb.Column column = 2;</code>
   */
  int getColumnCount();
  /**
   * <code>repeated .hbase.pb.Column column = 2;</code>
   */
  java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ColumnOrBuilder>
      getColumnOrBuilderList();
  /**
   * <code>repeated .hbase.pb.Column column = 2;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder(
      int index);

  /**
   * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
   */
  java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair>
      getAttributeList();
  /**
   * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index);
  /**
   * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
   */
  int getAttributeCount();
  /**
   * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
   */
  java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
      getAttributeOrBuilderList();
  /**
   * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
      int index);

  /**
   * <code>optional .hbase.pb.Filter filter = 4;</code>
   */
  boolean hasFilter();
  /**
   * <code>optional .hbase.pb.Filter filter = 4;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter getFilter();
  /**
   * <code>optional .hbase.pb.Filter filter = 4;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder();

  /**
   * <code>optional .hbase.pb.TimeRange time_range = 5;</code>
   */
  boolean hasTimeRange();
  /**
   * <code>optional .hbase.pb.TimeRange time_range = 5;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange getTimeRange();
  /**
   * <code>optional .hbase.pb.TimeRange time_range = 5;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder();

  /**
   * <code>optional uint32 max_versions = 6 [default = 1];</code>
   */
  boolean hasMaxVersions();
  /**
   * <code>optional uint32 max_versions = 6 [default = 1];</code>
   */
  int getMaxVersions();

  /**
   * <code>optional bool cache_blocks = 7 [default = true];</code>
   */
  boolean hasCacheBlocks();
  /**
   * <code>optional bool cache_blocks = 7 [default = true];</code>
   */
  boolean getCacheBlocks();

  /**
   * <code>optional uint32 store_limit = 8;</code>
   */
  boolean hasStoreLimit();
  /**
   * <code>optional uint32 store_limit = 8;</code>
   */
  int getStoreLimit();

  /**
   * <code>optional uint32 store_offset = 9;</code>
   */
  boolean hasStoreOffset();
  /**
   * <code>optional uint32 store_offset = 9;</code>
   */
  int getStoreOffset();

  /**
   * <pre>
   * The result isn't asked for, just check for
   * the existence.
   * </pre>
   *
   * <code>optional bool existence_only = 10 [default = false];</code>
   */
  boolean hasExistenceOnly();
  /**
   * <pre>
   * The result isn't asked for, just check for
   * the existence.
   * </pre>
   *
   * <code>optional bool existence_only = 10 [default = false];</code>
   */
  boolean getExistenceOnly();

  /**
   * <code>optional .hbase.pb.Consistency consistency = 12 [default = STRONG];</code>
   */
  boolean hasConsistency();
  /**
   * <code>optional .hbase.pb.Consistency consistency = 12 [default = STRONG];</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency getConsistency();

  /**
   * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code>
   */
  java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange>
      getCfTimeRangeList();
  /**
   * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange getCfTimeRange(int index);
  /**
   * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code>
   */
  int getCfTimeRangeCount();
  /**
   * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code>
   */
  java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder>
      getCfTimeRangeOrBuilderList();
  /**
   * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder getCfTimeRangeOrBuilder(
      int index);

  /**
   * <pre>
   * DO NOT add defaults to load_column_families_on_demand.
   * </pre>
   *
   * <code>optional bool load_column_families_on_demand = 14;</code>
   */
  boolean hasLoadColumnFamiliesOnDemand();
  /**
   * <pre>
   * DO NOT add defaults to load_column_families_on_demand.
   * </pre>
   *
   * <code>optional bool load_column_families_on_demand = 14;</code>
   */
  boolean getLoadColumnFamiliesOnDemand();
}

/**
 * <pre>
 **
 * The protocol buffer version of Get.
* Unless existence_only is specified, return all the requested data * for the row that matches exactly. * </pre> * * Protobuf type {@code hbase.pb.Get} */ public static final class Get extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.Get) GetOrBuilder { // Use Get.newBuilder() to construct. private Get(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private Get() { row_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; column_ = java.util.Collections.emptyList(); attribute_ = java.util.Collections.emptyList(); maxVersions_ = 1; cacheBlocks_ = true; storeLimit_ = 0; storeOffset_ = 0; existenceOnly_ = false; consistency_ = 0; cfTimeRange_ = java.util.Collections.emptyList(); loadColumnFamiliesOnDemand_ = false; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private Get( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; row_ = input.readBytes(); break; } case 18: { if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { column_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column>(); 
mutable_bitField0_ |= 0x00000002; } column_.add( input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.PARSER, extensionRegistry)); break; } case 26: { if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { attribute_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair>(); mutable_bitField0_ |= 0x00000004; } attribute_.add( input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry)); break; } case 34: { org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.Builder subBuilder = null; if (((bitField0_ & 0x00000002) == 0x00000002)) { subBuilder = filter_.toBuilder(); } filter_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(filter_); filter_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } case 42: { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = null; if (((bitField0_ & 0x00000004) == 0x00000004)) { subBuilder = timeRange_.toBuilder(); } timeRange_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(timeRange_); timeRange_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000004; break; } case 48: { bitField0_ |= 0x00000008; maxVersions_ = input.readUInt32(); break; } case 56: { bitField0_ |= 0x00000010; cacheBlocks_ = input.readBool(); break; } case 64: { bitField0_ |= 0x00000020; storeLimit_ = input.readUInt32(); break; } case 72: { bitField0_ |= 0x00000040; storeOffset_ = input.readUInt32(); break; } case 80: { bitField0_ |= 0x00000080; existenceOnly_ = input.readBool(); break; } case 96: { int rawValue = input.readEnum(); org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency value = 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(12, rawValue); } else { bitField0_ |= 0x00000100; consistency_ = rawValue; } break; } case 106: { if (!((mutable_bitField0_ & 0x00000800) == 0x00000800)) { cfTimeRange_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange>(); mutable_bitField0_ |= 0x00000800; } cfTimeRange_.add( input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.PARSER, extensionRegistry)); break; } case 112: { bitField0_ |= 0x00000200; loadColumnFamiliesOnDemand_ = input.readBool(); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { column_ = java.util.Collections.unmodifiableList(column_); } if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { attribute_ = java.util.Collections.unmodifiableList(attribute_); } if (((mutable_bitField0_ & 0x00000800) == 0x00000800)) { cfTimeRange_ = java.util.Collections.unmodifiableList(cfTimeRange_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Get_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Get_fieldAccessorTable .ensureFieldAccessorsInitialized( 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.Builder.class); } private int bitField0_; public static final int ROW_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString row_; /** * <code>required bytes row = 1;</code> */ public boolean hasRow() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes row = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getRow() { return row_; } public static final int COLUMN_FIELD_NUMBER = 2; private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column> column_; /** * <code>repeated .hbase.pb.Column column = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column> getColumnList() { return column_; } /** * <code>repeated .hbase.pb.Column column = 2;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ColumnOrBuilder> getColumnOrBuilderList() { return column_; } /** * <code>repeated .hbase.pb.Column column = 2;</code> */ public int getColumnCount() { return column_.size(); } /** * <code>repeated .hbase.pb.Column column = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column getColumn(int index) { return column_.get(index); } /** * <code>repeated .hbase.pb.Column column = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder( int index) { return column_.get(index); } public static final int ATTRIBUTE_FIELD_NUMBER = 3; private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair> attribute_; /** * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList() { 
return attribute_; } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getAttributeOrBuilderList() { return attribute_; } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code> */ public int getAttributeCount() { return attribute_.size(); } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) { return attribute_.get(index); } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( int index) { return attribute_.get(index); } public static final int FILTER_FIELD_NUMBER = 4; private org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter filter_; /** * <code>optional .hbase.pb.Filter filter = 4;</code> */ public boolean hasFilter() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional .hbase.pb.Filter filter = 4;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter getFilter() { return filter_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance() : filter_; } /** * <code>optional .hbase.pb.Filter filter = 4;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() { return filter_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance() : filter_; } public static final int TIME_RANGE_FIELD_NUMBER = 5; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange timeRange_; /** * <code>optional .hbase.pb.TimeRange time_range = 5;</code> */ public boolean hasTimeRange() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional .hbase.pb.TimeRange time_range = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { return timeRange_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance() : timeRange_; } /** * <code>optional .hbase.pb.TimeRange time_range = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { return timeRange_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance() : timeRange_; } public static final int MAX_VERSIONS_FIELD_NUMBER = 6; private int maxVersions_; /** * <code>optional uint32 max_versions = 6 [default = 1];</code> */ public boolean hasMaxVersions() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional uint32 max_versions = 6 [default = 1];</code> */ public int getMaxVersions() { return maxVersions_; } public static final int CACHE_BLOCKS_FIELD_NUMBER = 7; private boolean cacheBlocks_; /** * <code>optional bool cache_blocks = 7 [default = true];</code> */ public boolean hasCacheBlocks() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>optional bool cache_blocks = 7 [default = true];</code> */ public boolean getCacheBlocks() { return cacheBlocks_; } public static final int STORE_LIMIT_FIELD_NUMBER = 8; private int storeLimit_; /** * <code>optional uint32 store_limit = 8;</code> */ public boolean hasStoreLimit() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <code>optional uint32 
store_limit = 8;</code> */ public int getStoreLimit() { return storeLimit_; } public static final int STORE_OFFSET_FIELD_NUMBER = 9; private int storeOffset_; /** * <code>optional uint32 store_offset = 9;</code> */ public boolean hasStoreOffset() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** * <code>optional uint32 store_offset = 9;</code> */ public int getStoreOffset() { return storeOffset_; } public static final int EXISTENCE_ONLY_FIELD_NUMBER = 10; private boolean existenceOnly_; /** * <pre> * The result isn't asked for, just check for * the existence. * </pre> * * <code>optional bool existence_only = 10 [default = false];</code> */ public boolean hasExistenceOnly() { return ((bitField0_ & 0x00000080) == 0x00000080); } /** * <pre> * The result isn't asked for, just check for * the existence. * </pre> * * <code>optional bool existence_only = 10 [default = false];</code> */ public boolean getExistenceOnly() { return existenceOnly_; } public static final int CONSISTENCY_FIELD_NUMBER = 12; private int consistency_; /** * <code>optional .hbase.pb.Consistency consistency = 12 [default = STRONG];</code> */ public boolean hasConsistency() { return ((bitField0_ & 0x00000100) == 0x00000100); } /** * <code>optional .hbase.pb.Consistency consistency = 12 [default = STRONG];</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency getConsistency() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency result = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency.valueOf(consistency_); return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency.STRONG : result; } public static final int CF_TIME_RANGE_FIELD_NUMBER = 13; private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> cfTimeRange_; /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> getCfTimeRangeList() { return cfTimeRange_; } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder> getCfTimeRangeOrBuilderList() { return cfTimeRange_; } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code> */ public int getCfTimeRangeCount() { return cfTimeRange_.size(); } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange getCfTimeRange(int index) { return cfTimeRange_.get(index); } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder getCfTimeRangeOrBuilder( int index) { return cfTimeRange_.get(index); } public static final int LOAD_COLUMN_FAMILIES_ON_DEMAND_FIELD_NUMBER = 14; private boolean loadColumnFamiliesOnDemand_; /** * <pre> * DO NOT add defaults to load_column_families_on_demand. * </pre> * * <code>optional bool load_column_families_on_demand = 14;</code> */ public boolean hasLoadColumnFamiliesOnDemand() { return ((bitField0_ & 0x00000200) == 0x00000200); } /** * <pre> * DO NOT add defaults to load_column_families_on_demand. 
* </pre> * * <code>optional bool load_column_families_on_demand = 14;</code> */ public boolean getLoadColumnFamiliesOnDemand() { return loadColumnFamiliesOnDemand_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasRow()) { memoizedIsInitialized = 0; return false; } for (int i = 0; i < getColumnCount(); i++) { if (!getColumn(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } for (int i = 0; i < getAttributeCount(); i++) { if (!getAttribute(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } if (hasFilter()) { if (!getFilter().isInitialized()) { memoizedIsInitialized = 0; return false; } } for (int i = 0; i < getCfTimeRangeCount(); i++) { if (!getCfTimeRange(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, row_); } for (int i = 0; i < column_.size(); i++) { output.writeMessage(2, column_.get(i)); } for (int i = 0; i < attribute_.size(); i++) { output.writeMessage(3, attribute_.get(i)); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeMessage(4, getFilter()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeMessage(5, getTimeRange()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeUInt32(6, maxVersions_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeBool(7, cacheBlocks_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { output.writeUInt32(8, storeLimit_); } if (((bitField0_ & 0x00000040) == 0x00000040)) { output.writeUInt32(9, storeOffset_); } if (((bitField0_ & 0x00000080) == 0x00000080)) { output.writeBool(10, existenceOnly_); } if (((bitField0_ & 0x00000100) == 
0x00000100)) { output.writeEnum(12, consistency_); } for (int i = 0; i < cfTimeRange_.size(); i++) { output.writeMessage(13, cfTimeRange_.get(i)); } if (((bitField0_ & 0x00000200) == 0x00000200)) { output.writeBool(14, loadColumnFamiliesOnDemand_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(1, row_); } for (int i = 0; i < column_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(2, column_.get(i)); } for (int i = 0; i < attribute_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(3, attribute_.get(i)); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(4, getFilter()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(5, getTimeRange()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt32Size(6, maxVersions_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(7, cacheBlocks_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt32Size(8, storeLimit_); } if (((bitField0_ & 0x00000040) == 0x00000040)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt32Size(9, storeOffset_); } if (((bitField0_ & 0x00000080) == 0x00000080)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream 
.computeBoolSize(10, existenceOnly_); } if (((bitField0_ & 0x00000100) == 0x00000100)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeEnumSize(12, consistency_); } for (int i = 0; i < cfTimeRange_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(13, cfTimeRange_.get(i)); } if (((bitField0_ & 0x00000200) == 0x00000200)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(14, loadColumnFamiliesOnDemand_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get) obj; boolean result = true; result = result && (hasRow() == other.hasRow()); if (hasRow()) { result = result && getRow() .equals(other.getRow()); } result = result && getColumnList() .equals(other.getColumnList()); result = result && getAttributeList() .equals(other.getAttributeList()); result = result && (hasFilter() == other.hasFilter()); if (hasFilter()) { result = result && getFilter() .equals(other.getFilter()); } result = result && (hasTimeRange() == other.hasTimeRange()); if (hasTimeRange()) { result = result && getTimeRange() .equals(other.getTimeRange()); } result = result && (hasMaxVersions() == other.hasMaxVersions()); if (hasMaxVersions()) { result = result && (getMaxVersions() == other.getMaxVersions()); } result = result && (hasCacheBlocks() == other.hasCacheBlocks()); if (hasCacheBlocks()) { result = result && (getCacheBlocks() == other.getCacheBlocks()); } result = result && (hasStoreLimit() == 
other.hasStoreLimit()); if (hasStoreLimit()) { result = result && (getStoreLimit() == other.getStoreLimit()); } result = result && (hasStoreOffset() == other.hasStoreOffset()); if (hasStoreOffset()) { result = result && (getStoreOffset() == other.getStoreOffset()); } result = result && (hasExistenceOnly() == other.hasExistenceOnly()); if (hasExistenceOnly()) { result = result && (getExistenceOnly() == other.getExistenceOnly()); } result = result && (hasConsistency() == other.hasConsistency()); if (hasConsistency()) { result = result && consistency_ == other.consistency_; } result = result && getCfTimeRangeList() .equals(other.getCfTimeRangeList()); result = result && (hasLoadColumnFamiliesOnDemand() == other.hasLoadColumnFamiliesOnDemand()); if (hasLoadColumnFamiliesOnDemand()) { result = result && (getLoadColumnFamiliesOnDemand() == other.getLoadColumnFamiliesOnDemand()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasRow()) { hash = (37 * hash) + ROW_FIELD_NUMBER; hash = (53 * hash) + getRow().hashCode(); } if (getColumnCount() > 0) { hash = (37 * hash) + COLUMN_FIELD_NUMBER; hash = (53 * hash) + getColumnList().hashCode(); } if (getAttributeCount() > 0) { hash = (37 * hash) + ATTRIBUTE_FIELD_NUMBER; hash = (53 * hash) + getAttributeList().hashCode(); } if (hasFilter()) { hash = (37 * hash) + FILTER_FIELD_NUMBER; hash = (53 * hash) + getFilter().hashCode(); } if (hasTimeRange()) { hash = (37 * hash) + TIME_RANGE_FIELD_NUMBER; hash = (53 * hash) + getTimeRange().hashCode(); } if (hasMaxVersions()) { hash = (37 * hash) + MAX_VERSIONS_FIELD_NUMBER; hash = (53 * hash) + getMaxVersions(); } if (hasCacheBlocks()) { hash = (37 * hash) + CACHE_BLOCKS_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( 
getCacheBlocks()); } if (hasStoreLimit()) { hash = (37 * hash) + STORE_LIMIT_FIELD_NUMBER; hash = (53 * hash) + getStoreLimit(); } if (hasStoreOffset()) { hash = (37 * hash) + STORE_OFFSET_FIELD_NUMBER; hash = (53 * hash) + getStoreOffset(); } if (hasExistenceOnly()) { hash = (37 * hash) + EXISTENCE_ONLY_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getExistenceOnly()); } if (hasConsistency()) { hash = (37 * hash) + CONSISTENCY_FIELD_NUMBER; hash = (53 * hash) + consistency_; } if (getCfTimeRangeCount() > 0) { hash = (37 * hash) + CF_TIME_RANGE_FIELD_NUMBER; hash = (53 * hash) + getCfTimeRangeList().hashCode(); } if (hasLoadColumnFamiliesOnDemand()) { hash = (37 * hash) + LOAD_COLUMN_FAMILIES_ON_DEMAND_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getLoadColumnFamiliesOnDemand()); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get 
parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get parseFrom( 
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> ** * The protocol buffer version of Get. * Unless existence_only is specified, return all the requested data * for the row that matches exactly. 
* </pre> * * Protobuf type {@code hbase.pb.Get} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.Get) org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Get_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Get_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getColumnFieldBuilder(); getAttributeFieldBuilder(); getFilterFieldBuilder(); getTimeRangeFieldBuilder(); getCfTimeRangeFieldBuilder(); } } public Builder clear() { super.clear(); row_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); if (columnBuilder_ == null) { column_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); } else { columnBuilder_.clear(); } if (attributeBuilder_ == null) { attribute_ = java.util.Collections.emptyList(); 
bitField0_ = (bitField0_ & ~0x00000004); } else { attributeBuilder_.clear(); } if (filterBuilder_ == null) { filter_ = null; } else { filterBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000008); if (timeRangeBuilder_ == null) { timeRange_ = null; } else { timeRangeBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000010); maxVersions_ = 1; bitField0_ = (bitField0_ & ~0x00000020); cacheBlocks_ = true; bitField0_ = (bitField0_ & ~0x00000040); storeLimit_ = 0; bitField0_ = (bitField0_ & ~0x00000080); storeOffset_ = 0; bitField0_ = (bitField0_ & ~0x00000100); existenceOnly_ = false; bitField0_ = (bitField0_ & ~0x00000200); consistency_ = 0; bitField0_ = (bitField0_ & ~0x00000400); if (cfTimeRangeBuilder_ == null) { cfTimeRange_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000800); } else { cfTimeRangeBuilder_.clear(); } loadColumnFamiliesOnDemand_ = false; bitField0_ = (bitField0_ & ~0x00001000); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Get_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 
0x00000001)) { to_bitField0_ |= 0x00000001; } result.row_ = row_; if (columnBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002)) { column_ = java.util.Collections.unmodifiableList(column_); bitField0_ = (bitField0_ & ~0x00000002); } result.column_ = column_; } else { result.column_ = columnBuilder_.build(); } if (attributeBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004)) { attribute_ = java.util.Collections.unmodifiableList(attribute_); bitField0_ = (bitField0_ & ~0x00000004); } result.attribute_ = attribute_; } else { result.attribute_ = attributeBuilder_.build(); } if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000002; } if (filterBuilder_ == null) { result.filter_ = filter_; } else { result.filter_ = filterBuilder_.build(); } if (((from_bitField0_ & 0x00000010) == 0x00000010)) { to_bitField0_ |= 0x00000004; } if (timeRangeBuilder_ == null) { result.timeRange_ = timeRange_; } else { result.timeRange_ = timeRangeBuilder_.build(); } if (((from_bitField0_ & 0x00000020) == 0x00000020)) { to_bitField0_ |= 0x00000008; } result.maxVersions_ = maxVersions_; if (((from_bitField0_ & 0x00000040) == 0x00000040)) { to_bitField0_ |= 0x00000010; } result.cacheBlocks_ = cacheBlocks_; if (((from_bitField0_ & 0x00000080) == 0x00000080)) { to_bitField0_ |= 0x00000020; } result.storeLimit_ = storeLimit_; if (((from_bitField0_ & 0x00000100) == 0x00000100)) { to_bitField0_ |= 0x00000040; } result.storeOffset_ = storeOffset_; if (((from_bitField0_ & 0x00000200) == 0x00000200)) { to_bitField0_ |= 0x00000080; } result.existenceOnly_ = existenceOnly_; if (((from_bitField0_ & 0x00000400) == 0x00000400)) { to_bitField0_ |= 0x00000100; } result.consistency_ = consistency_; if (cfTimeRangeBuilder_ == null) { if (((bitField0_ & 0x00000800) == 0x00000800)) { cfTimeRange_ = java.util.Collections.unmodifiableList(cfTimeRange_); bitField0_ = (bitField0_ & ~0x00000800); } result.cfTimeRange_ = cfTimeRange_; } else { result.cfTimeRange_ = 
cfTimeRangeBuilder_.build(); } if (((from_bitField0_ & 0x00001000) == 0x00001000)) { to_bitField0_ |= 0x00000200; } result.loadColumnFamiliesOnDemand_ = loadColumnFamiliesOnDemand_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.getDefaultInstance()) return this; if (other.hasRow()) { setRow(other.getRow()); } if (columnBuilder_ == null) { if (!other.column_.isEmpty()) { if (column_.isEmpty()) { column_ = other.column_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureColumnIsMutable(); 
column_.addAll(other.column_); } onChanged(); } } else { if (!other.column_.isEmpty()) { if (columnBuilder_.isEmpty()) { columnBuilder_.dispose(); columnBuilder_ = null; column_ = other.column_; bitField0_ = (bitField0_ & ~0x00000002); columnBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getColumnFieldBuilder() : null; } else { columnBuilder_.addAllMessages(other.column_); } } } if (attributeBuilder_ == null) { if (!other.attribute_.isEmpty()) { if (attribute_.isEmpty()) { attribute_ = other.attribute_; bitField0_ = (bitField0_ & ~0x00000004); } else { ensureAttributeIsMutable(); attribute_.addAll(other.attribute_); } onChanged(); } } else { if (!other.attribute_.isEmpty()) { if (attributeBuilder_.isEmpty()) { attributeBuilder_.dispose(); attributeBuilder_ = null; attribute_ = other.attribute_; bitField0_ = (bitField0_ & ~0x00000004); attributeBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getAttributeFieldBuilder() : null; } else { attributeBuilder_.addAllMessages(other.attribute_); } } } if (other.hasFilter()) { mergeFilter(other.getFilter()); } if (other.hasTimeRange()) { mergeTimeRange(other.getTimeRange()); } if (other.hasMaxVersions()) { setMaxVersions(other.getMaxVersions()); } if (other.hasCacheBlocks()) { setCacheBlocks(other.getCacheBlocks()); } if (other.hasStoreLimit()) { setStoreLimit(other.getStoreLimit()); } if (other.hasStoreOffset()) { setStoreOffset(other.getStoreOffset()); } if (other.hasExistenceOnly()) { setExistenceOnly(other.getExistenceOnly()); } if (other.hasConsistency()) { setConsistency(other.getConsistency()); } if (cfTimeRangeBuilder_ == null) { if (!other.cfTimeRange_.isEmpty()) { if (cfTimeRange_.isEmpty()) { cfTimeRange_ = other.cfTimeRange_; bitField0_ = (bitField0_ & ~0x00000800); } else { ensureCfTimeRangeIsMutable(); cfTimeRange_.addAll(other.cfTimeRange_); } onChanged(); } } else { if (!other.cfTimeRange_.isEmpty()) { if (cfTimeRangeBuilder_.isEmpty()) { cfTimeRangeBuilder_.dispose(); cfTimeRangeBuilder_ = null; cfTimeRange_ = other.cfTimeRange_; bitField0_ = (bitField0_ & ~0x00000800); cfTimeRangeBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getCfTimeRangeFieldBuilder() : null; } else { cfTimeRangeBuilder_.addAllMessages(other.cfTimeRange_); } } } if (other.hasLoadColumnFamiliesOnDemand()) { setLoadColumnFamiliesOnDemand(other.getLoadColumnFamiliesOnDemand()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasRow()) { return false; } for (int i = 0; i < getColumnCount(); i++) { if (!getColumn(i).isInitialized()) { return false; } } for (int i = 0; i < getAttributeCount(); i++) { if (!getAttribute(i).isInitialized()) { return false; } } if (hasFilter()) { if (!getFilter().isInitialized()) { return false; } } for (int i = 0; i < getCfTimeRangeCount(); i++) { if (!getCfTimeRange(i).isInitialized()) { return false; } } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString row_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes row = 1;</code> */ public boolean hasRow() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes row = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getRow() { return row_; } /** * <code>required bytes row = 1;</code> */ public Builder 
setRow(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; row_ = value; onChanged(); return this; } /** * <code>required bytes row = 1;</code> */ public Builder clearRow() { bitField0_ = (bitField0_ & ~0x00000001); row_ = getDefaultInstance().getRow(); onChanged(); return this; } private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column> column_ = java.util.Collections.emptyList(); private void ensureColumnIsMutable() { if (!((bitField0_ & 0x00000002) == 0x00000002)) { column_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column>(column_); bitField0_ |= 0x00000002; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ColumnOrBuilder> columnBuilder_; /** * <code>repeated .hbase.pb.Column column = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column> getColumnList() { if (columnBuilder_ == null) { return java.util.Collections.unmodifiableList(column_); } else { return columnBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.Column column = 2;</code> */ public int getColumnCount() { if (columnBuilder_ == null) { return column_.size(); } else { return columnBuilder_.getCount(); } } /** * <code>repeated .hbase.pb.Column column = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column getColumn(int index) { if (columnBuilder_ == null) { return column_.get(index); } else { return columnBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.Column column = 2;</code> */ public Builder setColumn( int index, 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column value) { if (columnBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureColumnIsMutable(); column_.set(index, value); onChanged(); } else { columnBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.Column column = 2;</code> */ public Builder setColumn( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.Builder builderForValue) { if (columnBuilder_ == null) { ensureColumnIsMutable(); column_.set(index, builderForValue.build()); onChanged(); } else { columnBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.Column column = 2;</code> */ public Builder addColumn(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column value) { if (columnBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureColumnIsMutable(); column_.add(value); onChanged(); } else { columnBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.Column column = 2;</code> */ public Builder addColumn( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column value) { if (columnBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureColumnIsMutable(); column_.add(index, value); onChanged(); } else { columnBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.Column column = 2;</code> */ public Builder addColumn( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.Builder builderForValue) { if (columnBuilder_ == null) { ensureColumnIsMutable(); column_.add(builderForValue.build()); onChanged(); } else { columnBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.Column column = 2;</code> */ public Builder addColumn( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.Builder 
builderForValue) { if (columnBuilder_ == null) { ensureColumnIsMutable(); column_.add(index, builderForValue.build()); onChanged(); } else { columnBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.Column column = 2;</code> */ public Builder addAllColumn( java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column> values) { if (columnBuilder_ == null) { ensureColumnIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, column_); onChanged(); } else { columnBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.Column column = 2;</code> */ public Builder clearColumn() { if (columnBuilder_ == null) { column_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { columnBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.Column column = 2;</code> */ public Builder removeColumn(int index) { if (columnBuilder_ == null) { ensureColumnIsMutable(); column_.remove(index); onChanged(); } else { columnBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.Column column = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.Builder getColumnBuilder( int index) { return getColumnFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.Column column = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder( int index) { if (columnBuilder_ == null) { return column_.get(index); } else { return columnBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.Column column = 2;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ColumnOrBuilder> getColumnOrBuilderList() { if (columnBuilder_ != null) { return columnBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(column_); } } /** * <code>repeated .hbase.pb.Column column = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder() { return getColumnFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.getDefaultInstance()); } /** * <code>repeated .hbase.pb.Column column = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder( int index) { return getColumnFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.getDefaultInstance()); } /** * <code>repeated .hbase.pb.Column column = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.Builder> getColumnBuilderList() { return getColumnFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ColumnOrBuilder> getColumnFieldBuilder() { if (columnBuilder_ == null) { columnBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ColumnOrBuilder>( column_, ((bitField0_ & 0x00000002) == 0x00000002), getParentForChildren(), isClean()); column_ = null; } return columnBuilder_; } private 
java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair> attribute_ = java.util.Collections.emptyList(); private void ensureAttributeIsMutable() { if (!((bitField0_ & 0x00000004) == 0x00000004)) { attribute_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair>(attribute_); bitField0_ |= 0x00000004; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> attributeBuilder_; /** * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList() { if (attributeBuilder_ == null) { return java.util.Collections.unmodifiableList(attribute_); } else { return attributeBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code> */ public int getAttributeCount() { if (attributeBuilder_ == null) { return attribute_.size(); } else { return attributeBuilder_.getCount(); } } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) { if (attributeBuilder_ == null) { return attribute_.get(index); } else { return attributeBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code> */ public Builder setAttribute( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair value) { if (attributeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAttributeIsMutable(); attribute_.set(index, value); onChanged(); } else { attributeBuilder_.setMessage(index, value); } 
return this; } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code> */ public Builder setAttribute( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { if (attributeBuilder_ == null) { ensureAttributeIsMutable(); attribute_.set(index, builderForValue.build()); onChanged(); } else { attributeBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code> */ public Builder addAttribute(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair value) { if (attributeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAttributeIsMutable(); attribute_.add(value); onChanged(); } else { attributeBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code> */ public Builder addAttribute( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair value) { if (attributeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAttributeIsMutable(); attribute_.add(index, value); onChanged(); } else { attributeBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code> */ public Builder addAttribute( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { if (attributeBuilder_ == null) { ensureAttributeIsMutable(); attribute_.add(builderForValue.build()); onChanged(); } else { attributeBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code> */ public Builder addAttribute( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { if (attributeBuilder_ == null) { ensureAttributeIsMutable(); attribute_.add(index, builderForValue.build()); onChanged(); } 
else { attributeBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code> */ public Builder addAllAttribute( java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair> values) { if (attributeBuilder_ == null) { ensureAttributeIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, attribute_); onChanged(); } else { attributeBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code> */ public Builder clearAttribute() { if (attributeBuilder_ == null) { attribute_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); } else { attributeBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code> */ public Builder removeAttribute(int index) { if (attributeBuilder_ == null) { ensureAttributeIsMutable(); attribute_.remove(index); onChanged(); } else { attributeBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder getAttributeBuilder( int index) { return getAttributeFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( int index) { if (attributeBuilder_ == null) { return attribute_.get(index); } else { return attributeBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getAttributeOrBuilderList() { if (attributeBuilder_ != null) { return attributeBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(attribute_); } } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder() { return getAttributeFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder( int index) { return getAttributeFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 3;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder> getAttributeBuilderList() { return getAttributeFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getAttributeFieldBuilder() { if (attributeBuilder_ == null) { attributeBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( attribute_, ((bitField0_ & 
0x00000004) == 0x00000004), getParentForChildren(), isClean()); attribute_ = null; } return attributeBuilder_; } private org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter filter_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterOrBuilder> filterBuilder_; /** * <code>optional .hbase.pb.Filter filter = 4;</code> */ public boolean hasFilter() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional .hbase.pb.Filter filter = 4;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter getFilter() { if (filterBuilder_ == null) { return filter_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance() : filter_; } else { return filterBuilder_.getMessage(); } } /** * <code>optional .hbase.pb.Filter filter = 4;</code> */ public Builder setFilter(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter value) { if (filterBuilder_ == null) { if (value == null) { throw new NullPointerException(); } filter_ = value; onChanged(); } else { filterBuilder_.setMessage(value); } bitField0_ |= 0x00000008; return this; } /** * <code>optional .hbase.pb.Filter filter = 4;</code> */ public Builder setFilter( org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.Builder builderForValue) { if (filterBuilder_ == null) { filter_ = builderForValue.build(); onChanged(); } else { filterBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000008; return this; } /** * <code>optional .hbase.pb.Filter filter = 4;</code> */ public Builder mergeFilter(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter value) { if (filterBuilder_ == null) { if (((bitField0_ & 0x00000008) == 
0x00000008) && filter_ != null && filter_ != org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance()) { filter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.newBuilder(filter_).mergeFrom(value).buildPartial(); } else { filter_ = value; } onChanged(); } else { filterBuilder_.mergeFrom(value); } bitField0_ |= 0x00000008; return this; } /** * <code>optional .hbase.pb.Filter filter = 4;</code> */ public Builder clearFilter() { if (filterBuilder_ == null) { filter_ = null; onChanged(); } else { filterBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000008); return this; } /** * <code>optional .hbase.pb.Filter filter = 4;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.Builder getFilterBuilder() { bitField0_ |= 0x00000008; onChanged(); return getFilterFieldBuilder().getBuilder(); } /** * <code>optional .hbase.pb.Filter filter = 4;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() { if (filterBuilder_ != null) { return filterBuilder_.getMessageOrBuilder(); } else { return filter_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance() : filter_; } } /** * <code>optional .hbase.pb.Filter filter = 4;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterOrBuilder> getFilterFieldBuilder() { if (filterBuilder_ == null) { filterBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterOrBuilder>( getFilter(), getParentForChildren(), isClean()); filter_ = null; } return filterBuilder_; } private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange timeRange_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_; /** * <code>optional .hbase.pb.TimeRange time_range = 5;</code> */ public boolean hasTimeRange() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>optional .hbase.pb.TimeRange time_range = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { if (timeRangeBuilder_ == null) { return timeRange_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance() : timeRange_; } else { return timeRangeBuilder_.getMessage(); } } /** * <code>optional .hbase.pb.TimeRange time_range = 5;</code> */ public Builder setTimeRange(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange value) { if (timeRangeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } timeRange_ = value; onChanged(); } else { timeRangeBuilder_.setMessage(value); } bitField0_ |= 0x00000010; return this; } /** * <code>optional .hbase.pb.TimeRange time_range = 5;</code> */ public Builder setTimeRange( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) { if (timeRangeBuilder_ == null) { timeRange_ = builderForValue.build(); onChanged(); } else { timeRangeBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000010; return this; } /** * <code>optional .hbase.pb.TimeRange time_range = 5;</code> */ public Builder mergeTimeRange(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange value) { if (timeRangeBuilder_ == null) { if (((bitField0_ & 0x00000010) == 0x00000010) && timeRange_ != null && timeRange_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) { timeRange_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial(); } else { timeRange_ = value; } onChanged(); } else { timeRangeBuilder_.mergeFrom(value); } bitField0_ |= 0x00000010; return this; } /** * <code>optional .hbase.pb.TimeRange time_range = 5;</code> */ public Builder clearTimeRange() { if (timeRangeBuilder_ == null) { timeRange_ = null; onChanged(); } else { timeRangeBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000010); return this; } /** * <code>optional .hbase.pb.TimeRange time_range = 5;</code> */ public 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() { bitField0_ |= 0x00000010; onChanged(); return getTimeRangeFieldBuilder().getBuilder(); } /** * <code>optional .hbase.pb.TimeRange time_range = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { if (timeRangeBuilder_ != null) { return timeRangeBuilder_.getMessageOrBuilder(); } else { return timeRange_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance() : timeRange_; } } /** * <code>optional .hbase.pb.TimeRange time_range = 5;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> getTimeRangeFieldBuilder() { if (timeRangeBuilder_ == null) { timeRangeBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>( getTimeRange(), getParentForChildren(), isClean()); timeRange_ = null; } return timeRangeBuilder_; } private int maxVersions_ = 1; /** * <code>optional uint32 max_versions = 6 [default = 1];</code> */ public boolean hasMaxVersions() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <code>optional uint32 max_versions = 6 [default = 1];</code> */ public int getMaxVersions() { return maxVersions_; } /** * <code>optional uint32 max_versions = 6 [default = 1];</code> */ public Builder setMaxVersions(int value) { bitField0_ |= 0x00000020; maxVersions_ = value; onChanged(); return this; } /** * 
<code>optional uint32 max_versions = 6 [default = 1];</code> */ public Builder clearMaxVersions() { bitField0_ = (bitField0_ & ~0x00000020); maxVersions_ = 1; onChanged(); return this; } private boolean cacheBlocks_ = true; /** * <code>optional bool cache_blocks = 7 [default = true];</code> */ public boolean hasCacheBlocks() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** * <code>optional bool cache_blocks = 7 [default = true];</code> */ public boolean getCacheBlocks() { return cacheBlocks_; } /** * <code>optional bool cache_blocks = 7 [default = true];</code> */ public Builder setCacheBlocks(boolean value) { bitField0_ |= 0x00000040; cacheBlocks_ = value; onChanged(); return this; } /** * <code>optional bool cache_blocks = 7 [default = true];</code> */ public Builder clearCacheBlocks() { bitField0_ = (bitField0_ & ~0x00000040); cacheBlocks_ = true; onChanged(); return this; } private int storeLimit_ ; /** * <code>optional uint32 store_limit = 8;</code> */ public boolean hasStoreLimit() { return ((bitField0_ & 0x00000080) == 0x00000080); } /** * <code>optional uint32 store_limit = 8;</code> */ public int getStoreLimit() { return storeLimit_; } /** * <code>optional uint32 store_limit = 8;</code> */ public Builder setStoreLimit(int value) { bitField0_ |= 0x00000080; storeLimit_ = value; onChanged(); return this; } /** * <code>optional uint32 store_limit = 8;</code> */ public Builder clearStoreLimit() { bitField0_ = (bitField0_ & ~0x00000080); storeLimit_ = 0; onChanged(); return this; } private int storeOffset_ ; /** * <code>optional uint32 store_offset = 9;</code> */ public boolean hasStoreOffset() { return ((bitField0_ & 0x00000100) == 0x00000100); } /** * <code>optional uint32 store_offset = 9;</code> */ public int getStoreOffset() { return storeOffset_; } /** * <code>optional uint32 store_offset = 9;</code> */ public Builder setStoreOffset(int value) { bitField0_ |= 0x00000100; storeOffset_ = value; onChanged(); return this; } /** * <code>optional 
uint32 store_offset = 9;</code> */ public Builder clearStoreOffset() { bitField0_ = (bitField0_ & ~0x00000100); storeOffset_ = 0; onChanged(); return this; } private boolean existenceOnly_ ; /** * <pre> * The result isn't asked for, just check for * the existence. * </pre> * * <code>optional bool existence_only = 10 [default = false];</code> */ public boolean hasExistenceOnly() { return ((bitField0_ & 0x00000200) == 0x00000200); } /** * <pre> * The result isn't asked for, just check for * the existence. * </pre> * * <code>optional bool existence_only = 10 [default = false];</code> */ public boolean getExistenceOnly() { return existenceOnly_; } /** * <pre> * The result isn't asked for, just check for * the existence. * </pre> * * <code>optional bool existence_only = 10 [default = false];</code> */ public Builder setExistenceOnly(boolean value) { bitField0_ |= 0x00000200; existenceOnly_ = value; onChanged(); return this; } /** * <pre> * The result isn't asked for, just check for * the existence. * </pre> * * <code>optional bool existence_only = 10 [default = false];</code> */ public Builder clearExistenceOnly() { bitField0_ = (bitField0_ & ~0x00000200); existenceOnly_ = false; onChanged(); return this; } private int consistency_ = 0; /** * <code>optional .hbase.pb.Consistency consistency = 12 [default = STRONG];</code> */ public boolean hasConsistency() { return ((bitField0_ & 0x00000400) == 0x00000400); } /** * <code>optional .hbase.pb.Consistency consistency = 12 [default = STRONG];</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency getConsistency() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency result = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency.valueOf(consistency_); return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency.STRONG : result; } /** * <code>optional .hbase.pb.Consistency consistency = 12 [default = STRONG];</code> */ public Builder setConsistency(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000400; consistency_ = value.getNumber(); onChanged(); return this; } /** * <code>optional .hbase.pb.Consistency consistency = 12 [default = STRONG];</code> */ public Builder clearConsistency() { bitField0_ = (bitField0_ & ~0x00000400); consistency_ = 0; onChanged(); return this; } private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> cfTimeRange_ = java.util.Collections.emptyList(); private void ensureCfTimeRangeIsMutable() { if (!((bitField0_ & 0x00000800) == 0x00000800)) { cfTimeRange_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange>(cfTimeRange_); bitField0_ |= 0x00000800; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder> cfTimeRangeBuilder_; /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> getCfTimeRangeList() { if (cfTimeRangeBuilder_ == null) { return java.util.Collections.unmodifiableList(cfTimeRange_); } else { return cfTimeRangeBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code> */ public int getCfTimeRangeCount() { if (cfTimeRangeBuilder_ == null) { return 
cfTimeRange_.size(); } else { return cfTimeRangeBuilder_.getCount(); } } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange getCfTimeRange(int index) { if (cfTimeRangeBuilder_ == null) { return cfTimeRange_.get(index); } else { return cfTimeRangeBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code> */ public Builder setCfTimeRange( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange value) { if (cfTimeRangeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCfTimeRangeIsMutable(); cfTimeRange_.set(index, value); onChanged(); } else { cfTimeRangeBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code> */ public Builder setCfTimeRange( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder builderForValue) { if (cfTimeRangeBuilder_ == null) { ensureCfTimeRangeIsMutable(); cfTimeRange_.set(index, builderForValue.build()); onChanged(); } else { cfTimeRangeBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code> */ public Builder addCfTimeRange(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange value) { if (cfTimeRangeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCfTimeRangeIsMutable(); cfTimeRange_.add(value); onChanged(); } else { cfTimeRangeBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code> */ public Builder addCfTimeRange( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange value) { if (cfTimeRangeBuilder_ == null) { if 
(value == null) { throw new NullPointerException(); } ensureCfTimeRangeIsMutable(); cfTimeRange_.add(index, value); onChanged(); } else { cfTimeRangeBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code> */ public Builder addCfTimeRange( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder builderForValue) { if (cfTimeRangeBuilder_ == null) { ensureCfTimeRangeIsMutable(); cfTimeRange_.add(builderForValue.build()); onChanged(); } else { cfTimeRangeBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code> */ public Builder addCfTimeRange( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder builderForValue) { if (cfTimeRangeBuilder_ == null) { ensureCfTimeRangeIsMutable(); cfTimeRange_.add(index, builderForValue.build()); onChanged(); } else { cfTimeRangeBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code> */ public Builder addAllCfTimeRange( java.lang.Iterable<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> values) { if (cfTimeRangeBuilder_ == null) { ensureCfTimeRangeIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, cfTimeRange_); onChanged(); } else { cfTimeRangeBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code> */ public Builder clearCfTimeRange() { if (cfTimeRangeBuilder_ == null) { cfTimeRange_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000800); onChanged(); } else { cfTimeRangeBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code> */ public Builder removeCfTimeRange(int index) { if (cfTimeRangeBuilder_ == null) { ensureCfTimeRangeIsMutable(); cfTimeRange_.remove(index); onChanged(); } else { cfTimeRangeBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder getCfTimeRangeBuilder( int index) { return getCfTimeRangeFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder getCfTimeRangeOrBuilder( int index) { if (cfTimeRangeBuilder_ == null) { return cfTimeRange_.get(index); } else { return cfTimeRangeBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder> getCfTimeRangeOrBuilderList() { if (cfTimeRangeBuilder_ != null) { return cfTimeRangeBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(cfTimeRange_); } } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder addCfTimeRangeBuilder() { return getCfTimeRangeFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.getDefaultInstance()); } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder addCfTimeRangeBuilder( int index) { return getCfTimeRangeFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.getDefaultInstance()); } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 13;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder> getCfTimeRangeBuilderList() { return getCfTimeRangeFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder> getCfTimeRangeFieldBuilder() { if (cfTimeRangeBuilder_ == null) { cfTimeRangeBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder>( cfTimeRange_, ((bitField0_ & 0x00000800) == 0x00000800), getParentForChildren(), isClean()); cfTimeRange_ = null; } return cfTimeRangeBuilder_; } private boolean loadColumnFamiliesOnDemand_ ; /** * <pre> * DO NOT add defaults to load_column_families_on_demand. * </pre> * * <code>optional bool load_column_families_on_demand = 14;</code> */ public boolean hasLoadColumnFamiliesOnDemand() { return ((bitField0_ & 0x00001000) == 0x00001000); } /** * <pre> * DO NOT add defaults to load_column_families_on_demand. * </pre> * * <code>optional bool load_column_families_on_demand = 14;</code> */ public boolean getLoadColumnFamiliesOnDemand() { return loadColumnFamiliesOnDemand_; } /** * <pre> * DO NOT add defaults to load_column_families_on_demand. * </pre> * * <code>optional bool load_column_families_on_demand = 14;</code> */ public Builder setLoadColumnFamiliesOnDemand(boolean value) { bitField0_ |= 0x00001000; loadColumnFamiliesOnDemand_ = value; onChanged(); return this; } /** * <pre> * DO NOT add defaults to load_column_families_on_demand. 
* </pre> * * <code>optional bool load_column_families_on_demand = 14;</code> */ public Builder clearLoadColumnFamiliesOnDemand() { bitField0_ = (bitField0_ & ~0x00001000); loadColumnFamiliesOnDemand_ = false; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.Get) } // @@protoc_insertion_point(class_scope:hbase.pb.Get) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Get> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<Get>() { public Get parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new Get(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Get> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Get> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ResultOrBuilder extends // 
@@protoc_insertion_point(interface_extends:hbase.pb.Result) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <pre> * Result includes the Cells or else it just has a count of Cells * that are carried otherwise. * </pre> * * <code>repeated .hbase.pb.Cell cell = 1;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell> getCellList(); /** * <pre> * Result includes the Cells or else it just has a count of Cells * that are carried otherwise. * </pre> * * <code>repeated .hbase.pb.Cell cell = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell getCell(int index); /** * <pre> * Result includes the Cells or else it just has a count of Cells * that are carried otherwise. * </pre> * * <code>repeated .hbase.pb.Cell cell = 1;</code> */ int getCellCount(); /** * <pre> * Result includes the Cells or else it just has a count of Cells * that are carried otherwise. * </pre> * * <code>repeated .hbase.pb.Cell cell = 1;</code> */ java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellOrBuilder> getCellOrBuilderList(); /** * <pre> * Result includes the Cells or else it just has a count of Cells * that are carried otherwise. * </pre> * * <code>repeated .hbase.pb.Cell cell = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellOrBuilder getCellOrBuilder( int index); /** * <pre> * The below count is set when the associated cells are * not part of this protobuf message; they are passed alongside * and then this Message is just a placeholder with metadata. * The count is needed to know how many to peel off the block of Cells as * ours. NOTE: This is different from the pb managed cell_count of the * 'cell' field above which is non-null when the cells are pb'd. 
* </pre> * * <code>optional int32 associated_cell_count = 2;</code> */ boolean hasAssociatedCellCount(); /** * <pre> * The below count is set when the associated cells are * not part of this protobuf message; they are passed alongside * and then this Message is just a placeholder with metadata. * The count is needed to know how many to peel off the block of Cells as * ours. NOTE: This is different from the pb managed cell_count of the * 'cell' field above which is non-null when the cells are pb'd. * </pre> * * <code>optional int32 associated_cell_count = 2;</code> */ int getAssociatedCellCount(); /** * <pre> * used for Get to check existence only. Not set if existence_only was not set to true * in the query. * </pre> * * <code>optional bool exists = 3;</code> */ boolean hasExists(); /** * <pre> * used for Get to check existence only. Not set if existence_only was not set to true * in the query. * </pre> * * <code>optional bool exists = 3;</code> */ boolean getExists(); /** * <pre> * Whether or not the results are coming from possibly stale data * </pre> * * <code>optional bool stale = 4 [default = false];</code> */ boolean hasStale(); /** * <pre> * Whether or not the results are coming from possibly stale data * </pre> * * <code>optional bool stale = 4 [default = false];</code> */ boolean getStale(); /** * <pre> * Whether or not the entire result could be returned. Results will be split when * the RPC chunk size limit is reached. Partial results contain only a subset of the * cells for a row and must be combined with a result containing the remaining cells * to form a complete result. The equivalent flag in o.a.h.h.client.Result is * mayHaveMoreCellsInRow. * </pre> * * <code>optional bool partial = 5 [default = false];</code> */ boolean hasPartial(); /** * <pre> * Whether or not the entire result could be returned. Results will be split when * the RPC chunk size limit is reached. 
Partial results contain only a subset of the * cells for a row and must be combined with a result containing the remaining cells * to form a complete result. The equivalent flag in o.a.h.h.client.Result is * mayHaveMoreCellsInRow. * </pre> * * <code>optional bool partial = 5 [default = false];</code> */ boolean getPartial(); } /** * Protobuf type {@code hbase.pb.Result} */ public static final class Result extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.Result) ResultOrBuilder { // Use Result.newBuilder() to construct. private Result(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private Result() { cell_ = java.util.Collections.emptyList(); associatedCellCount_ = 0; exists_ = false; stale_ = false; partial_ = false; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private Result( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { cell_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell>(); mutable_bitField0_ |= 0x00000001; } cell_.add( 
input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell.PARSER, extensionRegistry)); break; } case 16: { bitField0_ |= 0x00000001; associatedCellCount_ = input.readInt32(); break; } case 24: { bitField0_ |= 0x00000002; exists_ = input.readBool(); break; } case 32: { bitField0_ |= 0x00000004; stale_ = input.readBool(); break; } case 40: { bitField0_ |= 0x00000008; partial_ = input.readBool(); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { cell_ = java.util.Collections.unmodifiableList(cell_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Result_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Result_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder.class); } private int bitField0_; public static final int CELL_FIELD_NUMBER = 1; private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell> cell_; /** * <pre> * Result includes the Cells or else it just has a count of Cells * that are carried otherwise. 
* </pre> * * <code>repeated .hbase.pb.Cell cell = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell> getCellList() { return cell_; } /** * <pre> * Result includes the Cells or else it just has a count of Cells * that are carried otherwise. * </pre> * * <code>repeated .hbase.pb.Cell cell = 1;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellOrBuilder> getCellOrBuilderList() { return cell_; } /** * <pre> * Result includes the Cells or else it just has a count of Cells * that are carried otherwise. * </pre> * * <code>repeated .hbase.pb.Cell cell = 1;</code> */ public int getCellCount() { return cell_.size(); } /** * <pre> * Result includes the Cells or else it just has a count of Cells * that are carried otherwise. * </pre> * * <code>repeated .hbase.pb.Cell cell = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell getCell(int index) { return cell_.get(index); } /** * <pre> * Result includes the Cells or else it just has a count of Cells * that are carried otherwise. * </pre> * * <code>repeated .hbase.pb.Cell cell = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellOrBuilder getCellOrBuilder( int index) { return cell_.get(index); } public static final int ASSOCIATED_CELL_COUNT_FIELD_NUMBER = 2; private int associatedCellCount_; /** * <pre> * The below count is set when the associated cells are * not part of this protobuf message; they are passed alongside * and then this Message is just a placeholder with metadata. * The count is needed to know how many to peel off the block of Cells as * ours. NOTE: This is different from the pb managed cell_count of the * 'cell' field above which is non-null when the cells are pb'd. 
* </pre> * * <code>optional int32 associated_cell_count = 2;</code> */ public boolean hasAssociatedCellCount() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * The below count is set when the associated cells are * not part of this protobuf message; they are passed alongside * and then this Message is just a placeholder with metadata. * The count is needed to know how many to peel off the block of Cells as * ours. NOTE: This is different from the pb managed cell_count of the * 'cell' field above which is non-null when the cells are pb'd. * </pre> * * <code>optional int32 associated_cell_count = 2;</code> */ public int getAssociatedCellCount() { return associatedCellCount_; } public static final int EXISTS_FIELD_NUMBER = 3; private boolean exists_; /** * <pre> * used for Get to check existence only. Not set if existence_only was not set to true * in the query. * </pre> * * <code>optional bool exists = 3;</code> */ public boolean hasExists() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <pre> * used for Get to check existence only. Not set if existence_only was not set to true * in the query. * </pre> * * <code>optional bool exists = 3;</code> */ public boolean getExists() { return exists_; } public static final int STALE_FIELD_NUMBER = 4; private boolean stale_; /** * <pre> * Whether or not the results are coming from possibly stale data * </pre> * * <code>optional bool stale = 4 [default = false];</code> */ public boolean hasStale() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <pre> * Whether or not the results are coming from possibly stale data * </pre> * * <code>optional bool stale = 4 [default = false];</code> */ public boolean getStale() { return stale_; } public static final int PARTIAL_FIELD_NUMBER = 5; private boolean partial_; /** * <pre> * Whether or not the entire result could be returned. Results will be split when * the RPC chunk size limit is reached. 
Partial results contain only a subset of the * cells for a row and must be combined with a result containing the remaining cells * to form a complete result. The equivalent flag in o.a.h.h.client.Result is * mayHaveMoreCellsInRow. * </pre> * * <code>optional bool partial = 5 [default = false];</code> */ public boolean hasPartial() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <pre> * Whether or not the entire result could be returned. Results will be split when * the RPC chunk size limit is reached. Partial results contain only a subset of the * cells for a row and must be combined with a result containing the remaining cells * to form a complete result. The equivalent flag in o.a.h.h.client.Result is * mayHaveMoreCellsInRow. * </pre> * * <code>optional bool partial = 5 [default = false];</code> */ public boolean getPartial() { return partial_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < cell_.size(); i++) { output.writeMessage(1, cell_.get(i)); } if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeInt32(2, associatedCellCount_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBool(3, exists_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBool(4, stale_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeBool(5, partial_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < cell_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(1, cell_.get(i)); } if (((bitField0_ & 0x00000001) == 0x00000001)) { 
size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeInt32Size(2, associatedCellCount_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(3, exists_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(4, stale_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(5, partial_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result) obj; boolean result = true; result = result && getCellList() .equals(other.getCellList()); result = result && (hasAssociatedCellCount() == other.hasAssociatedCellCount()); if (hasAssociatedCellCount()) { result = result && (getAssociatedCellCount() == other.getAssociatedCellCount()); } result = result && (hasExists() == other.hasExists()); if (hasExists()) { result = result && (getExists() == other.getExists()); } result = result && (hasStale() == other.hasStale()); if (hasStale()) { result = result && (getStale() == other.getStale()); } result = result && (hasPartial() == other.hasPartial()); if (hasPartial()) { result = result && (getPartial() == other.getPartial()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + 
getDescriptor().hashCode(); if (getCellCount() > 0) { hash = (37 * hash) + CELL_FIELD_NUMBER; hash = (53 * hash) + getCellList().hashCode(); } if (hasAssociatedCellCount()) { hash = (37 * hash) + ASSOCIATED_CELL_COUNT_FIELD_NUMBER; hash = (53 * hash) + getAssociatedCellCount(); } if (hasExists()) { hash = (37 * hash) + EXISTS_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getExists()); } if (hasStale()) { hash = (37 * hash) + STALE_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getStale()); } if (hasPartial()) { hash = (37 * hash) + PARTIAL_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getPartial()); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) 
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.Result} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.Result) org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Result_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Result_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder.class); } // Construct using 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getCellFieldBuilder(); } } public Builder clear() { super.clear(); if (cellBuilder_ == null) { cell_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { cellBuilder_.clear(); } associatedCellCount_ = 0; bitField0_ = (bitField0_ & ~0x00000002); exists_ = false; bitField0_ = (bitField0_ & ~0x00000004); stale_ = false; bitField0_ = (bitField0_ & ~0x00000008); partial_ = false; bitField0_ = (bitField0_ & ~0x00000010); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Result_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (cellBuilder_ == null) { if (((bitField0_ & 
0x00000001) == 0x00000001)) { cell_ = java.util.Collections.unmodifiableList(cell_); bitField0_ = (bitField0_ & ~0x00000001); } result.cell_ = cell_; } else { result.cell_ = cellBuilder_.build(); } if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000001; } result.associatedCellCount_ = associatedCellCount_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000002; } result.exists_ = exists_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000004; } result.stale_ = stale_; if (((from_bitField0_ & 0x00000010) == 0x00000010)) { to_bitField0_ |= 0x00000008; } result.partial_ = partial_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result)other); } else { 
super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance()) return this; if (cellBuilder_ == null) { if (!other.cell_.isEmpty()) { if (cell_.isEmpty()) { cell_ = other.cell_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureCellIsMutable(); cell_.addAll(other.cell_); } onChanged(); } } else { if (!other.cell_.isEmpty()) { if (cellBuilder_.isEmpty()) { cellBuilder_.dispose(); cellBuilder_ = null; cell_ = other.cell_; bitField0_ = (bitField0_ & ~0x00000001); cellBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getCellFieldBuilder() : null; } else { cellBuilder_.addAllMessages(other.cell_); } } } if (other.hasAssociatedCellCount()) { setAssociatedCellCount(other.getAssociatedCellCount()); } if (other.hasExists()) { setExists(other.getExists()); } if (other.hasStale()) { setStale(other.getStale()); } if (other.hasPartial()) { setPartial(other.getPartial()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private 
java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell> cell_ = java.util.Collections.emptyList(); private void ensureCellIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { cell_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell>(cell_); bitField0_ |= 0x00000001; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell, org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellOrBuilder> cellBuilder_; /** * <pre> * Result includes the Cells or else it just has a count of Cells * that are carried otherwise. * </pre> * * <code>repeated .hbase.pb.Cell cell = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell> getCellList() { if (cellBuilder_ == null) { return java.util.Collections.unmodifiableList(cell_); } else { return cellBuilder_.getMessageList(); } } /** * <pre> * Result includes the Cells or else it just has a count of Cells * that are carried otherwise. * </pre> * * <code>repeated .hbase.pb.Cell cell = 1;</code> */ public int getCellCount() { if (cellBuilder_ == null) { return cell_.size(); } else { return cellBuilder_.getCount(); } } /** * <pre> * Result includes the Cells or else it just has a count of Cells * that are carried otherwise. * </pre> * * <code>repeated .hbase.pb.Cell cell = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell getCell(int index) { if (cellBuilder_ == null) { return cell_.get(index); } else { return cellBuilder_.getMessage(index); } } /** * <pre> * Result includes the Cells or else it just has a count of Cells * that are carried otherwise. 
* </pre> * * <code>repeated .hbase.pb.Cell cell = 1;</code> */ public Builder setCell( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell value) { if (cellBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCellIsMutable(); cell_.set(index, value); onChanged(); } else { cellBuilder_.setMessage(index, value); } return this; } /** * <pre> * Result includes the Cells or else it just has a count of Cells * that are carried otherwise. * </pre> * * <code>repeated .hbase.pb.Cell cell = 1;</code> */ public Builder setCell( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell.Builder builderForValue) { if (cellBuilder_ == null) { ensureCellIsMutable(); cell_.set(index, builderForValue.build()); onChanged(); } else { cellBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <pre> * Result includes the Cells or else it just has a count of Cells * that are carried otherwise. * </pre> * * <code>repeated .hbase.pb.Cell cell = 1;</code> */ public Builder addCell(org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell value) { if (cellBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCellIsMutable(); cell_.add(value); onChanged(); } else { cellBuilder_.addMessage(value); } return this; } /** * <pre> * Result includes the Cells or else it just has a count of Cells * that are carried otherwise. * </pre> * * <code>repeated .hbase.pb.Cell cell = 1;</code> */ public Builder addCell( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell value) { if (cellBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCellIsMutable(); cell_.add(index, value); onChanged(); } else { cellBuilder_.addMessage(index, value); } return this; } /** * <pre> * Result includes the Cells or else it just has a count of Cells * that are carried otherwise. 
* </pre> * * <code>repeated .hbase.pb.Cell cell = 1;</code> */ public Builder addCell( org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell.Builder builderForValue) { if (cellBuilder_ == null) { ensureCellIsMutable(); cell_.add(builderForValue.build()); onChanged(); } else { cellBuilder_.addMessage(builderForValue.build()); } return this; } /** * <pre> * Result includes the Cells or else it just has a count of Cells * that are carried otherwise. * </pre> * * <code>repeated .hbase.pb.Cell cell = 1;</code> */ public Builder addCell( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell.Builder builderForValue) { if (cellBuilder_ == null) { ensureCellIsMutable(); cell_.add(index, builderForValue.build()); onChanged(); } else { cellBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <pre> * Result includes the Cells or else it just has a count of Cells * that are carried otherwise. * </pre> * * <code>repeated .hbase.pb.Cell cell = 1;</code> */ public Builder addAllCell( java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell> values) { if (cellBuilder_ == null) { ensureCellIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, cell_); onChanged(); } else { cellBuilder_.addAllMessages(values); } return this; } /** * <pre> * Result includes the Cells or else it just has a count of Cells * that are carried otherwise. * </pre> * * <code>repeated .hbase.pb.Cell cell = 1;</code> */ public Builder clearCell() { if (cellBuilder_ == null) { cell_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { cellBuilder_.clear(); } return this; } /** * <pre> * Result includes the Cells or else it just has a count of Cells * that are carried otherwise. 
* </pre> * * <code>repeated .hbase.pb.Cell cell = 1;</code> */ public Builder removeCell(int index) { if (cellBuilder_ == null) { ensureCellIsMutable(); cell_.remove(index); onChanged(); } else { cellBuilder_.remove(index); } return this; } /** * <pre> * Result includes the Cells or else it just has a count of Cells * that are carried otherwise. * </pre> * * <code>repeated .hbase.pb.Cell cell = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell.Builder getCellBuilder( int index) { return getCellFieldBuilder().getBuilder(index); } /** * <pre> * Result includes the Cells or else it just has a count of Cells * that are carried otherwise. * </pre> * * <code>repeated .hbase.pb.Cell cell = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellOrBuilder getCellOrBuilder( int index) { if (cellBuilder_ == null) { return cell_.get(index); } else { return cellBuilder_.getMessageOrBuilder(index); } } /** * <pre> * Result includes the Cells or else it just has a count of Cells * that are carried otherwise. * </pre> * * <code>repeated .hbase.pb.Cell cell = 1;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellOrBuilder> getCellOrBuilderList() { if (cellBuilder_ != null) { return cellBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(cell_); } } /** * <pre> * Result includes the Cells or else it just has a count of Cells * that are carried otherwise. * </pre> * * <code>repeated .hbase.pb.Cell cell = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell.Builder addCellBuilder() { return getCellFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell.getDefaultInstance()); } /** * <pre> * Result includes the Cells or else it just has a count of Cells * that are carried otherwise. 
* </pre> * * <code>repeated .hbase.pb.Cell cell = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell.Builder addCellBuilder( int index) { return getCellFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell.getDefaultInstance()); } /** * <pre> * Result includes the Cells or else it just has a count of Cells * that are carried otherwise. * </pre> * * <code>repeated .hbase.pb.Cell cell = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell.Builder> getCellBuilderList() { return getCellFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell, org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellOrBuilder> getCellFieldBuilder() { if (cellBuilder_ == null) { cellBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell, org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.CellOrBuilder>( cell_, ((bitField0_ & 0x00000001) == 0x00000001), getParentForChildren(), isClean()); cell_ = null; } return cellBuilder_; } private int associatedCellCount_ ; /** * <pre> * The below count is set when the associated cells are * not part of this protobuf message; they are passed alongside * and then this Message is just a placeholder with metadata. * The count is needed to know how many to peel off the block of Cells as * ours. NOTE: This is different from the pb managed cell_count of the * 'cell' field above which is non-null when the cells are pb'd. 
* </pre> * * <code>optional int32 associated_cell_count = 2;</code> */ public boolean hasAssociatedCellCount() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <pre> * The below count is set when the associated cells are * not part of this protobuf message; they are passed alongside * and then this Message is just a placeholder with metadata. * The count is needed to know how many to peel off the block of Cells as * ours. NOTE: This is different from the pb managed cell_count of the * 'cell' field above which is non-null when the cells are pb'd. * </pre> * * <code>optional int32 associated_cell_count = 2;</code> */ public int getAssociatedCellCount() { return associatedCellCount_; } /** * <pre> * The below count is set when the associated cells are * not part of this protobuf message; they are passed alongside * and then this Message is just a placeholder with metadata. * The count is needed to know how many to peel off the block of Cells as * ours. NOTE: This is different from the pb managed cell_count of the * 'cell' field above which is non-null when the cells are pb'd. * </pre> * * <code>optional int32 associated_cell_count = 2;</code> */ public Builder setAssociatedCellCount(int value) { bitField0_ |= 0x00000002; associatedCellCount_ = value; onChanged(); return this; } /** * <pre> * The below count is set when the associated cells are * not part of this protobuf message; they are passed alongside * and then this Message is just a placeholder with metadata. * The count is needed to know how many to peel off the block of Cells as * ours. NOTE: This is different from the pb managed cell_count of the * 'cell' field above which is non-null when the cells are pb'd. * </pre> * * <code>optional int32 associated_cell_count = 2;</code> */ public Builder clearAssociatedCellCount() { bitField0_ = (bitField0_ & ~0x00000002); associatedCellCount_ = 0; onChanged(); return this; } private boolean exists_ ; /** * <pre> * used for Get to check existence only. 
Not set if existence_only was not set to true * in the query. * </pre> * * <code>optional bool exists = 3;</code> */ public boolean hasExists() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <pre> * used for Get to check existence only. Not set if existence_only was not set to true * in the query. * </pre> * * <code>optional bool exists = 3;</code> */ public boolean getExists() { return exists_; } /** * <pre> * used for Get to check existence only. Not set if existence_only was not set to true * in the query. * </pre> * * <code>optional bool exists = 3;</code> */ public Builder setExists(boolean value) { bitField0_ |= 0x00000004; exists_ = value; onChanged(); return this; } /** * <pre> * used for Get to check existence only. Not set if existence_only was not set to true * in the query. * </pre> * * <code>optional bool exists = 3;</code> */ public Builder clearExists() { bitField0_ = (bitField0_ & ~0x00000004); exists_ = false; onChanged(); return this; } private boolean stale_ ; /** * <pre> * Whether or not the results are coming from possibly stale data * </pre> * * <code>optional bool stale = 4 [default = false];</code> */ public boolean hasStale() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <pre> * Whether or not the results are coming from possibly stale data * </pre> * * <code>optional bool stale = 4 [default = false];</code> */ public boolean getStale() { return stale_; } /** * <pre> * Whether or not the results are coming from possibly stale data * </pre> * * <code>optional bool stale = 4 [default = false];</code> */ public Builder setStale(boolean value) { bitField0_ |= 0x00000008; stale_ = value; onChanged(); return this; } /** * <pre> * Whether or not the results are coming from possibly stale data * </pre> * * <code>optional bool stale = 4 [default = false];</code> */ public Builder clearStale() { bitField0_ = (bitField0_ & ~0x00000008); stale_ = false; onChanged(); return this; } private boolean partial_ ; /** * <pre> * 
Whether or not the entire result could be returned. Results will be split when * the RPC chunk size limit is reached. Partial results contain only a subset of the * cells for a row and must be combined with a result containing the remaining cells * to form a complete result. The equivalent flag in o.a.h.h.client.Result is * mayHaveMoreCellsInRow. * </pre> * * <code>optional bool partial = 5 [default = false];</code> */ public boolean hasPartial() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <pre> * Whether or not the entire result could be returned. Results will be split when * the RPC chunk size limit is reached. Partial results contain only a subset of the * cells for a row and must be combined with a result containing the remaining cells * to form a complete result. The equivalent flag in o.a.h.h.client.Result is * mayHaveMoreCellsInRow. * </pre> * * <code>optional bool partial = 5 [default = false];</code> */ public boolean getPartial() { return partial_; } /** * <pre> * Whether or not the entire result could be returned. Results will be split when * the RPC chunk size limit is reached. Partial results contain only a subset of the * cells for a row and must be combined with a result containing the remaining cells * to form a complete result. The equivalent flag in o.a.h.h.client.Result is * mayHaveMoreCellsInRow. * </pre> * * <code>optional bool partial = 5 [default = false];</code> */ public Builder setPartial(boolean value) { bitField0_ |= 0x00000010; partial_ = value; onChanged(); return this; } /** * <pre> * Whether or not the entire result could be returned. Results will be split when * the RPC chunk size limit is reached. Partial results contain only a subset of the * cells for a row and must be combined with a result containing the remaining cells * to form a complete result. The equivalent flag in o.a.h.h.client.Result is * mayHaveMoreCellsInRow. 
* </pre> * * <code>optional bool partial = 5 [default = false];</code> */ public Builder clearPartial() { bitField0_ = (bitField0_ & ~0x00000010); partial_ = false; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.Result) } // @@protoc_insertion_point(class_scope:hbase.pb.Result) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Result> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<Result>() { public Result parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new Result(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Result> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Result> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface GetRequestOrBuilder extends // 
@@protoc_insertion_point(interface_extends:hbase.pb.GetRequest) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ boolean hasRegion(); /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); /** * <code>required .hbase.pb.Get get = 2;</code> */ boolean hasGet(); /** * <code>required .hbase.pb.Get get = 2;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get getGet(); /** * <code>required .hbase.pb.Get get = 2;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder(); } /** * <pre> ** * The get request. Perform a single Get operation. * </pre> * * Protobuf type {@code hbase.pb.GetRequest} */ public static final class GetRequest extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.GetRequest) GetRequestOrBuilder { // Use GetRequest.newBuilder() to construct. 
private GetRequest(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetRequest() {
    }

    @java.lang.Override
    public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
    getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format constructor: reads tag/value pairs until end of stream (tag 0).
    // Field 1 (tag 10) merges into region_, field 2 (tag 18) merges into get_;
    // any unrecognized field is preserved in unknownFields.
    private GetRequest(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      this();
      int mutable_bitField0_ = 0;
      org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // If region was already seen, merge the new message into the old one.
              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = region_.toBuilder();
              }
              region_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(region_);
                region_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 18: {
              // Same repeat-merge handling for the 'get' field.
              org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.Builder subBuilder = null;
              if (((bitField0_ & 0x00000002) == 0x00000002)) {
                subBuilder = get_.toBuilder();
              }
              get_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(get_);
                get_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000002;
              break;
            }
          }
        }
      } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
            e).setUnfinishedMessage(this);
      } finally {
        // Always freeze unknown fields and extensions, even on a partial parse.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_GetRequest_descriptor;
    }

    protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_GetRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest.Builder.class);
    }

    // Presence bits: 0x1 = region, 0x2 = get.
    private int bitField0_;
    public static final int REGION_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_;
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    public boolean hasRegion() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
      return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_;
    }
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
      return region_ == null ?
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_;
    }

    public static final int GET_FIELD_NUMBER = 2;
    private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get get_;
    /**
     * <code>required .hbase.pb.Get get = 2;</code>
     */
    public boolean hasGet() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>required .hbase.pb.Get get = 2;</code>
     */
    public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get getGet() {
      return get_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.getDefaultInstance() : get_;
    }
    /**
     * <code>required .hbase.pb.Get get = 2;</code>
     */
    public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() {
      return get_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.getDefaultInstance() : get_;
    }

    // Memoized result: -1 = unknown, 0 = not initialized, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    // Both required fields must be present and recursively initialized.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (!hasRegion()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasGet()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getRegion().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getGet().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    // Serializes only the fields whose presence bits are set, then any unknown fields.
    public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, getRegion());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeMessage(2, getGet());
      }
      unknownFields.writeTo(output);
    }

    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, getRegion());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
          .computeMessageSize(2, getGet());
      }
      size += unknownFields.getSerializedSize();
      memoizedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    // Field-by-field equality, gated on matching presence bits; unknown fields must match too.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest) obj;

      boolean result = true;
      result = result && (hasRegion() == other.hasRegion());
      if (hasRegion()) {
        result = result && getRegion()
            .equals(other.getRegion());
      }
      result = result && (hasGet() == other.hasGet());
      if (hasGet()) {
        result = result && getGet()
            .equals(other.getGet());
      }
      result = result && unknownFields.equals(other.unknownFields);
      return result;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasRegion()) {
        hash = (37 * hash) + REGION_FIELD_NUMBER;
        hash = (53 * hash) + getRegion().hashCode();
      }
      if (hasGet()) {
        hash = (37 * hash) + GET_FIELD_NUMBER;
        hash = (53 * hash) + getGet().hashCode();
      }
      hash = (29 * hash) + unknownFields.hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Static parse entry points; all delegate to PARSER / GeneratedMessageV3 helpers.
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest parseFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest parseFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest parseFrom(byte[] data)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest parseFrom(
        byte[] data,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    // Delimited variants expect a varint length prefix before the message bytes.
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest parseFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest parseFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() {
      // The default instance yields a fresh builder; any other instance seeds the builder with its state.
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * <pre>
     **
     * The get request. Perform a single Get operation.
* </pre>
     *
     * Protobuf type {@code hbase.pb.GetRequest}
     */
    public static final class Builder extends
        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hbase.pb.GetRequest)
        org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequestOrBuilder {
      public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_GetRequest_descriptor;
      }

      protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_GetRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the nested field builders when the runtime demands it (reflection-based access).
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getRegionFieldBuilder();
          getGetFieldBuilder();
        }
      }
      public Builder clear() {
        super.clear();
        if (regionBuilder_ == null) {
          region_ = null;
        } else {
          regionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        if (getBuilder_ == null) {
          get_ = null;
        } else {
          getBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_GetRequest_descriptor;
      }

      public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest.getDefaultInstance();
      }

      // build() enforces that both required fields are set; buildPartial() does not.
      public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest build() {
        org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest buildPartial() {
        org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (regionBuilder_ == null) {
          result.region_ = region_;
        } else {
          result.region_ = regionBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        if (getBuilder_ == null) {
          result.get_ = get_;
        } else {
          result.get_ = getBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder clone() {
        return (Builder) super.clone();
      }
      public Builder setField(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
          Object value) {
        return (Builder) super.setField(field, value);
      }
      public Builder clearField(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
        return (Builder) super.clearField(field);
      }
      public Builder clearOneof(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
        return (Builder) super.clearOneof(oneof);
      }
      public Builder setRepeatedField(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
          int index, Object value) {
        return (Builder) super.setRepeatedField(field, index, value);
      }
      public Builder addRepeatedField(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
          Object value) {
        return (Builder) super.addRepeatedField(field, value);
      }
      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest) {
          return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest other) {
        if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest.getDefaultInstance()) return this;
        if (other.hasRegion()) {
          mergeRegion(other.getRegion());
        }
        if (other.hasGet()) {
          mergeGet(other.getGet());
        }
        this.mergeUnknownFields(other.unknownFields);
        onChanged();
        return this;
      }

      public final boolean isInitialized() {
        if (!hasRegion()) {
          return false;
        }
        if (!hasGet()) {
          return false;
        }
        if (!getRegion().isInitialized()) {
          return false;
        }
        if (!getGet().isInitialized()) {
          return false;
        }
        return true;
      }

      // Stream merge: even on a parse failure, any partially-parsed message is merged before rethrowing.
      public Builder mergeFrom(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
          org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest) e.getUnfinishedMessage();
          throw e.unwrapIOException();
        } finally {
          if
(parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Builder-local presence bits: 0x1 = region, 0x2 = get.
      private int bitField0_;

      private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = null;
      private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public boolean hasRegion() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
        if (regionBuilder_ == null) {
          return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_;
        } else {
          return regionBuilder_.getMessage();
        }
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public Builder setRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) {
        if (regionBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          region_ = value;
          onChanged();
        } else {
          regionBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public Builder setRegion(
          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
        if (regionBuilder_ == null) {
          region_ = builderForValue.build();
          onChanged();
        } else {
          regionBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      // Merges into an existing non-default region; otherwise replaces it outright.
      public Builder mergeRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) {
        if (regionBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              region_ != null &&
              region_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
            region_ =
              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
          } else {
            region_ = value;
          }
          onChanged();
        } else {
          regionBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public Builder clearRegion() {
        if (regionBuilder_ == null) {
          region_ = null;
          onChanged();
        } else {
          regionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getRegionFieldBuilder().getBuilder();
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
        if (regionBuilder_ != null) {
          return regionBuilder_.getMessageOrBuilder();
        } else {
          return region_ == null ?
              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_;
        }
      }
      /**
       * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
       */
      // Lazily creates the nested builder; once created, region_ is owned by it and nulled here.
      private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
          getRegionFieldBuilder() {
        if (regionBuilder_ == null) {
          regionBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
                  getRegion(),
                  getParentForChildren(),
                  isClean());
          region_ = null;
        }
        return regionBuilder_;
      }

      private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get get_ = null;
      private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetOrBuilder> getBuilder_;
      /**
       * <code>required .hbase.pb.Get get = 2;</code>
       */
      public boolean hasGet() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>required .hbase.pb.Get get = 2;</code>
       */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get getGet() {
        if (getBuilder_ == null) {
          return get_ == null ?
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.getDefaultInstance() : get_;
        } else {
          return getBuilder_.getMessage();
        }
      }
      /**
       * <code>required .hbase.pb.Get get = 2;</code>
       */
      public Builder setGet(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get value) {
        if (getBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          get_ = value;
          onChanged();
        } else {
          getBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>required .hbase.pb.Get get = 2;</code>
       */
      public Builder setGet(
          org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.Builder builderForValue) {
        if (getBuilder_ == null) {
          get_ = builderForValue.build();
          onChanged();
        } else {
          getBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>required .hbase.pb.Get get = 2;</code>
       */
      // Merges into an existing non-default get; otherwise replaces it outright.
      public Builder mergeGet(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get value) {
        if (getBuilder_ == null) {
          if (((bitField0_ & 0x00000002) == 0x00000002) &&
              get_ != null &&
              get_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.getDefaultInstance()) {
            get_ =
              org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.newBuilder(get_).mergeFrom(value).buildPartial();
          } else {
            get_ = value;
          }
          onChanged();
        } else {
          getBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>required .hbase.pb.Get get = 2;</code>
       */
      public Builder clearGet() {
        if (getBuilder_ == null) {
          get_ = null;
          onChanged();
        } else {
          getBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }
      /**
       * <code>required .hbase.pb.Get get = 2;</code>
       */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.Builder getGetBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getGetFieldBuilder().getBuilder();
      }
      /**
       * <code>required .hbase.pb.Get get = 2;</code>
       */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() {
        if (getBuilder_ != null) {
          return getBuilder_.getMessageOrBuilder();
        } else {
          return get_ == null ?
              org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.getDefaultInstance() : get_;
        }
      }
      /**
       * <code>required .hbase.pb.Get get = 2;</code>
       */
      // Lazily creates the nested builder; once created, get_ is owned by it and nulled here.
      private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetOrBuilder>
          getGetFieldBuilder() {
        if (getBuilder_ == null) {
          getBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetOrBuilder>(
                  getGet(),
                  getParentForChildren(),
                  isClean());
          get_ = null;
        }
        return getBuilder_;
      }
      public final Builder setUnknownFields(
          final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      public final Builder mergeUnknownFields(
          final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.GetRequest)
    }

    // @@protoc_insertion_point(class_scope:hbase.pb.GetRequest)
    // Shared immutable default instance for hbase.pb.GetRequest; initialized eagerly in the static block.
    private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest();
    }

    public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final
org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<GetRequest>
        PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<GetRequest>() {
      public GetRequest parsePartialFrom(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
          org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
          return new GetRequest(input, extensionRegistry);
      }
    };

    // Preferred accessor for the parser; the public PARSER field above is deprecated.
    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<GetRequest> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<GetRequest> getParserForType() {
      return PARSER;
    }

    public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface GetResponseOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hbase.pb.GetResponse)
      org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {

    // Accessors for the single optional field of hbase.pb.GetResponse: 'result' (1).
    /**
     * <code>optional .hbase.pb.Result result = 1;</code>
     */
    boolean hasResult();
    /**
     * <code>optional .hbase.pb.Result result = 1;</code>
     */
    org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result getResult();
    /**
     * <code>optional .hbase.pb.Result result = 1;</code>
     */
    org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder();
  }
  /**
   * Protobuf type {@code hbase.pb.GetResponse}
   */
  public static final class GetResponse extends
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hbase.pb.GetResponse)
      GetResponseOrBuilder {
    // Use GetResponse.newBuilder() to construct.
// NOTE(review): protoc-generated code ("DO NOT EDIT" per file header).
private GetResponse(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
private GetResponse() {
}

@java.lang.Override
public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
  return this.unknownFields;
}

// Wire-parsing constructor: reads tags until EOF (tag 0), collecting
// unrecognized fields into unknownFields; field 1 (tag 10) is the Result
// message, merged into any previously-read value (last-one-wins merge).
private GetResponse(
    org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
  this();
  int mutable_bitField0_ = 0;
  org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        default: {
          if (!parseUnknownField(input, unknownFields,
                                 extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
        case 10: {
          org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder subBuilder = null;
          if (((bitField0_ & 0x00000001) == 0x00000001)) {
            subBuilder = result_.toBuilder();
          }
          result_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.PARSER, extensionRegistry);
          if (subBuilder != null) {
            subBuilder.mergeFrom(result_);
            result_ = subBuilder.buildPartial();
          }
          bitField0_ |= 0x00000001;
          break;
        }
      }
    }
  } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
        e).setUnfinishedMessage(this);
  } finally {
    // Runs on both success and failure so the partial message is coherent.
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return
// NOTE(review): protoc-generated code ("DO NOT EDIT" per file header).
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_GetResponse_descriptor;
}

protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_GetResponse_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse.Builder.class);
}

// Presence bits for optional fields; bit 0x00000001 tracks `result`.
private int bitField0_;
public static final int RESULT_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result result_;
/**
 * <code>optional .hbase.pb.Result result = 1;</code>
 */
public boolean hasResult() {
  return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>optional .hbase.pb.Result result = 1;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result getResult() {
  // Never returns null: unset field yields the default instance.
  return result_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance() : result_;
}
/**
 * <code>optional .hbase.pb.Result result = 1;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() {
  return result_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance() : result_;
}

// Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
// All fields of GetResponse are optional, so this always resolves to 1.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}

public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
                    throws java.io.IOException {
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    output.writeMessage(1, getResult());
  }
  unknownFields.writeTo(output);
}

public int getSerializedSize() {
  // Memoized in GeneratedMessageV3.memoizedSize; -1 means not yet computed.
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
      .computeMessageSize(1, getResult());
  }
  size += unknownFields.getSerializedSize();
  memoizedSize = size;
  return size;
}

private static final long serialVersionUID = 0L;
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
   return true;
  }
  if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse) obj;

  // Field-wise comparison: presence bit, then value, then unknown fields.
  boolean result = true;
  result = result && (hasResult() == other.hasResult());
  if (hasResult()) {
    result = result && getResult()
        .equals(other.getResult());
  }
  result = result && unknownFields.equals(other.unknownFields);
  return result;
}

@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (hasResult()) {
    hash = (37 * hash) + RESULT_FIELD_NUMBER;
    hash = (53 * hash) + getResult().hashCode();
  }
  hash = (29 * hash) + unknownFields.hashCode();
  memoizedHashCode = hash;
  return hash;
}

public static
// NOTE(review): protoc-generated parse entry points ("DO NOT EDIT" per file
// header). All overloads funnel into PARSER / parseWithIOException.
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse parseFrom(
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
    throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse parseFrom(
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse parseFrom(byte[] data)
    throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse parseFrom(
    byte[] data,
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse parseFrom(
    java.io.InputStream input,
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse parseDelimitedFrom(
    java.io.InputStream input,
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse parseFrom(
    org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse parseFrom(
    org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}

public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
  // Fresh builder for the default instance; otherwise seed with this message.
  return this == DEFAULT_INSTANCE
      ?
// NOTE(review): protoc-generated code ("DO NOT EDIT" per file header).
new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 * Protobuf type {@code hbase.pb.GetResponse}
 */
public static final class Builder extends
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
    // @@protoc_insertion_point(builder_implements:hbase.pb.GetResponse)
    org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponseOrBuilder {
  public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_GetResponse_descriptor;
  }

  protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_GetResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse.Builder.class);
  }

  // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }

  private Builder(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }
  private void maybeForceBuilderInitialization() {
    // Eagerly creates nested field builders when the runtime requests it.
    if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
            .alwaysUseFieldBuilders) {
      getResultFieldBuilder();
    }
  }
  public Builder clear() {
    super.clear();
    if (resultBuilder_ == null) {
      result_ = null;
    } else {
      resultBuilder_.clear();
    }
    bitField0_ = (bitField0_ & ~0x00000001);
    return this;
  }

  public
// NOTE(review): protoc-generated code ("DO NOT EDIT" per file header).
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    getDescriptorForType() {
  return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_GetResponse_descriptor;
}

public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse getDefaultInstanceForType() {
  return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance();
}

public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse build() {
  org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}

public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse buildPartial() {
  // Copies builder state into a new message; the field value comes from the
  // nested builder when one exists, otherwise from the plain field.
  org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse(this);
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
  if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
    to_bitField0_ |= 0x00000001;
  }
  if (resultBuilder_ == null) {
    result.result_ = result_;
  } else {
    result.result_ = resultBuilder_.build();
  }
  result.bitField0_ = to_bitField0_;
  onBuilt();
  return result;
}

public Builder clone() {
  return (Builder) super.clone();
}
public Builder setField(
    org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
    Object value) {
  return (Builder) super.setField(field, value);
}
public Builder clearField(
    org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
  return (Builder) super.clearField(field);
}
public Builder clearOneof(
    org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
    org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
    int index, Object value) {
  return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
    org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
    Object value) {
  return (Builder) super.addRepeatedField(field, value);
}
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
  if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse) {
    return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse)other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}

public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse other) {
  // Merging the default instance is a no-op.
  if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance()) return this;
  if (other.hasResult()) {
    mergeResult(other.getResult());
  }
  this.mergeUnknownFields(other.unknownFields);
  onChanged();
  return this;
}

public final boolean isInitialized() {
  return true;
}

public Builder mergeFrom(
    org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse parsedMessage = null;
  try {
    parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
  } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
    // Keep whatever was parsed before the failure, then rethrow.
    parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse) e.getUnfinishedMessage();
    throw e.unwrapIOException();
  } finally {
    if (parsedMessage != null) {
      mergeFrom(parsedMessage);
    }
  }
  return this;
}
private int bitField0_;

private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result result_ = null;
private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
// NOTE(review): protoc-generated single-field accessor suite for `result`
// ("DO NOT EDIT" per file header).
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder> resultBuilder_;
/**
 * <code>optional .hbase.pb.Result result = 1;</code>
 */
public boolean hasResult() {
  return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>optional .hbase.pb.Result result = 1;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result getResult() {
  if (resultBuilder_ == null) {
    return result_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance() : result_;
  } else {
    return resultBuilder_.getMessage();
  }
}
/**
 * <code>optional .hbase.pb.Result result = 1;</code>
 */
public Builder setResult(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result value) {
  if (resultBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    result_ = value;
    onChanged();
  } else {
    resultBuilder_.setMessage(value);
  }
  bitField0_ |= 0x00000001;
  return this;
}
/**
 * <code>optional .hbase.pb.Result result = 1;</code>
 */
public Builder setResult(
    org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder builderForValue) {
  if (resultBuilder_ == null) {
    result_ = builderForValue.build();
    onChanged();
  } else {
    resultBuilder_.setMessage(builderForValue.build());
  }
  bitField0_ |= 0x00000001;
  return this;
}
/**
 * <code>optional .hbase.pb.Result result = 1;</code>
 */
public Builder mergeResult(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result value) {
  if (resultBuilder_ == null) {
    // Merge into an existing non-default value; otherwise just adopt it.
    if (((bitField0_ & 0x00000001) == 0x00000001) &&
        result_ != null &&
        result_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance()) {
      result_ =
        org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.newBuilder(result_).mergeFrom(value).buildPartial();
    } else {
      result_ = value;
    }
    onChanged();
  } else {
    resultBuilder_.mergeFrom(value);
  }
  bitField0_ |= 0x00000001;
  return this;
}
/**
 * <code>optional .hbase.pb.Result result = 1;</code>
 */
public Builder clearResult() {
  if (resultBuilder_ == null) {
    result_ = null;
    onChanged();
  } else {
    resultBuilder_.clear();
  }
  bitField0_ = (bitField0_ & ~0x00000001);
  return this;
}
/**
 * <code>optional .hbase.pb.Result result = 1;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder getResultBuilder() {
  // Marks the field present before handing out a mutable builder.
  bitField0_ |= 0x00000001;
  onChanged();
  return getResultFieldBuilder().getBuilder();
}
/**
 * <code>optional .hbase.pb.Result result = 1;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() {
  if (resultBuilder_ != null) {
    return resultBuilder_.getMessageOrBuilder();
  } else {
    return result_ == null ?
        org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance() : result_;
  }
}
/**
 * <code>optional .hbase.pb.Result result = 1;</code>
 */
private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
    org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder>
    getResultFieldBuilder() {
  if (resultBuilder_ == null) {
    // Lazily create the nested builder; it takes ownership of the value.
    resultBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
        org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder>(
            getResult(),
            getParentForChildren(),
            isClean());
    result_ = null;
  }
  return resultBuilder_;
}
public final Builder setUnknownFields(
    final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
  return
// NOTE(review): protoc-generated code ("DO NOT EDIT" per file header).
super.setUnknownFields(unknownFields);
}

public final Builder mergeUnknownFields(
    final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}

// @@protoc_insertion_point(builder_scope:hbase.pb.GetResponse)
}

// @@protoc_insertion_point(class_scope:hbase.pb.GetResponse)
private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse();
}

public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Deprecated public PARSER field — callers should use parser() instead.
@java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<GetResponse>
    PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<GetResponse>() {
  public GetResponse parsePartialFrom(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      return new GetResponse(input, extensionRegistry);
  }
};

public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<GetResponse> parser() {
  return PARSER;
}

@java.lang.Override
public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<GetResponse> getParserForType() {
  return PARSER;
}

public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}

}

public interface ConditionOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hbase.pb.Condition)
    org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {

  /**
   * <code>required bytes row = 1;</code>
   */
  boolean hasRow();
  /**
   * <code>required bytes row = 1;</code>
   */
// NOTE(review): protoc-generated code ("DO NOT EDIT" per file header).
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getRow();
/**
 * <code>required bytes family = 2;</code>
 */
boolean hasFamily();
/**
 * <code>required bytes family = 2;</code>
 */
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getFamily();
/**
 * <code>required bytes qualifier = 3;</code>
 */
boolean hasQualifier();
/**
 * <code>required bytes qualifier = 3;</code>
 */
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getQualifier();
/**
 * <code>required .hbase.pb.CompareType compare_type = 4;</code>
 */
boolean hasCompareType();
/**
 * <code>required .hbase.pb.CompareType compare_type = 4;</code>
 */
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType getCompareType();
/**
 * <code>required .hbase.pb.Comparator comparator = 5;</code>
 */
boolean hasComparator();
/**
 * <code>required .hbase.pb.Comparator comparator = 5;</code>
 */
org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator getComparator();
/**
 * <code>required .hbase.pb.Comparator comparator = 5;</code>
 */
org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder();
}
/**
 * <pre>
 **
 * Condition to check if the value of a given cell (row,
 * family, qualifier) matches a value via a given comparator.
 * Condition is used in check and mutate operations.
 * </pre>
 *
 * Protobuf type {@code hbase.pb.Condition}
 */
public static final class Condition extends
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:hbase.pb.Condition)
    ConditionOrBuilder {
  // Use Condition.newBuilder() to construct.
// NOTE(review): protoc-generated code ("DO NOT EDIT" per file header).
private Condition(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// Defaults: empty byte strings for row/family/qualifier, 0 for compare_type.
private Condition() {
  row_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
  family_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
  qualifier_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
  compareType_ = 0;
}

@java.lang.Override
public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
  return this.unknownFields;
}

// Wire-parsing constructor: fields 1-3 are bytes, field 4 is an enum
// (unrecognized numbers are preserved as a varint in unknownFields),
// field 5 is a Comparator message merged into any prior value.
private Condition(
    org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
  this();
  int mutable_bitField0_ = 0;
  org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        default: {
          if (!parseUnknownField(input, unknownFields,
                                 extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
        case 10: {
          bitField0_ |= 0x00000001;
          row_ = input.readBytes();
          break;
        }
        case 18: {
          bitField0_ |= 0x00000002;
          family_ = input.readBytes();
          break;
        }
        case 26: {
          bitField0_ |= 0x00000004;
          qualifier_ = input.readBytes();
          break;
        }
        case 32: {
          int rawValue = input.readEnum();
          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType value = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType.valueOf(rawValue);
          if (value == null) {
            unknownFields.mergeVarintField(4, rawValue);
          } else {
            bitField0_ |= 0x00000008;
            compareType_ = rawValue;
          }
          break;
        }
        case 42: {
          org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.Builder subBuilder = null;
          if (((bitField0_ & 0x00000010) == 0x00000010)) {
            subBuilder = comparator_.toBuilder();
          }
          comparator_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.PARSER, extensionRegistry);
          if (subBuilder != null) {
            subBuilder.mergeFrom(comparator_);
            comparator_ = subBuilder.buildPartial();
          }
          bitField0_ |= 0x00000010;
          break;
        }
      }
    }
  } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
        e).setUnfinishedMessage(this);
  } finally {
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Condition_descriptor;
}

protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Condition_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.Builder.class);
}

// Presence bits: 0x1 row, 0x2 family, 0x4 qualifier, 0x8 compare_type,
// 0x10 comparator.
private int bitField0_;
public static final int ROW_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString row_;
/**
 * <code>required bytes row = 1;</code>
 */
public boolean hasRow() {
  return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required bytes row = 1;</code>
 */
public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getRow() {
  return row_;
}

public static final int FAMILY_FIELD_NUMBER = 2;
private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString family_;
/**
 * <code>required bytes family = 2;</code>
 */
public boolean hasFamily() {
  return ((bitField0_ & 0x00000002) == 0x00000002);
}
/** <code>required bytes family = 2;</code> */
public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getFamily() {
  return family_;
}

public static final int QUALIFIER_FIELD_NUMBER = 3;
private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString qualifier_;
/** <code>required bytes qualifier = 3;</code> */
public boolean hasQualifier() {
  return ((bitField0_ & 0x00000004) == 0x00000004);
}
/** <code>required bytes qualifier = 3;</code> */
public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getQualifier() {
  return qualifier_;
}

public static final int COMPARE_TYPE_FIELD_NUMBER = 4;
private int compareType_;
/** <code>required .hbase.pb.CompareType compare_type = 4;</code> */
public boolean hasCompareType() {
  return ((bitField0_ & 0x00000008) == 0x00000008);
}
/** <code>required .hbase.pb.CompareType compare_type = 4;</code> */
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType getCompareType() {
  org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType result =
      org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType.valueOf(compareType_);
  // A stored ordinal with no matching enum constant falls back to LESS.
  return result == null
      ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType.LESS
      : result;
}

public static final int COMPARATOR_FIELD_NUMBER = 5;
private org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator comparator_;
/** <code>required .hbase.pb.Comparator comparator = 5;</code> */
public boolean hasComparator() {
  return ((bitField0_ & 0x00000010) == 0x00000010);
}
/** <code>required .hbase.pb.Comparator comparator = 5;</code> */
public org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator getComparator() {
  return comparator_ == null
      ? org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance()
      : comparator_;
}
/** <code>required .hbase.pb.Comparator comparator = 5;</code> */
public org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() {
  return comparator_ == null
      ? org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance()
      : comparator_;
}

// Memoized required-field check: -1 unknown, 0 incomplete, 1 complete.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  if (!hasRow()) {
    memoizedIsInitialized = 0;
    return false;
  }
  if (!hasFamily()) {
    memoizedIsInitialized = 0;
    return false;
  }
  if (!hasQualifier()) {
    memoizedIsInitialized = 0;
    return false;
  }
  if (!hasCompareType()) {
    memoizedIsInitialized = 0;
    return false;
  }
  if (!hasComparator()) {
    memoizedIsInitialized = 0;
    return false;
  }
  if (!getComparator().isInitialized()) {
    memoizedIsInitialized = 0;
    return false;
  }
  memoizedIsInitialized = 1;
  return true;
}

/** Serializes only the fields whose presence bit is set, in field-number order. */
public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    output.writeBytes(1, row_);
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    output.writeBytes(2, family_);
  }
  if (((bitField0_ & 0x00000004) == 0x00000004)) {
    output.writeBytes(3, qualifier_);
  }
  if (((bitField0_ & 0x00000008) == 0x00000008)) {
    output.writeEnum(4, compareType_);
  }
  if (((bitField0_ & 0x00000010) == 0x00000010)) {
    output.writeMessage(5, getComparator());
  }
  unknownFields.writeTo(output);
}

public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
        .computeBytesSize(1, row_);
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
        .computeBytesSize(2, family_);
  }
  if (((bitField0_ & 0x00000004) == 0x00000004)) {
    size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
        .computeBytesSize(3, qualifier_);
  }
  if (((bitField0_ & 0x00000008) == 0x00000008)) {
    size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
        .computeEnumSize(4, compareType_);
  }
  if (((bitField0_ & 0x00000010) == 0x00000010)) {
    size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
        .computeMessageSize(5, getComparator());
  }
  size += unknownFields.getSerializedSize();
  memoizedSize = size;
  return size;
}

private static final long serialVersionUID = 0L;

@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition other =
      (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition) obj;
  // Fields compare equal when both are absent or both present with equal values.
  boolean result = true;
  result = result && (hasRow() == other.hasRow());
  if (hasRow()) {
    result = result && getRow().equals(other.getRow());
  }
  result = result && (hasFamily() == other.hasFamily());
  if (hasFamily()) {
    result = result && getFamily().equals(other.getFamily());
  }
  result = result && (hasQualifier() == other.hasQualifier());
  if (hasQualifier()) {
    result = result && getQualifier().equals(other.getQualifier());
  }
  result = result && (hasCompareType() == other.hasCompareType());
  if (hasCompareType()) {
    result = result && compareType_ == other.compareType_;
  }
  result = result && (hasComparator() == other.hasComparator());
  if (hasComparator()) {
    result = result && getComparator().equals(other.getComparator());
  }
  result = result && unknownFields.equals(other.unknownFields);
  return result;
}

@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (hasRow()) {
    hash = (37 * hash) + ROW_FIELD_NUMBER;
    hash = (53 * hash) + getRow().hashCode();
  }
  if (hasFamily()) {
    hash = (37 * hash) + FAMILY_FIELD_NUMBER;
    hash = (53 * hash) + getFamily().hashCode();
  }
  if (hasQualifier()) {
    hash = (37 * hash) + QUALIFIER_FIELD_NUMBER;
    hash = (53 * hash) + getQualifier().hashCode();
  }
  if (hasCompareType()) {
    hash = (37 * hash) + COMPARE_TYPE_FIELD_NUMBER;
    hash = (53 * hash) + compareType_;
  }
  if (hasComparator()) {
    hash = (37 * hash) + COMPARATOR_FIELD_NUMBER;
    hash = (53 * hash) + getComparator().hashCode();
  }
  hash = (29 * hash) + unknownFields.hashCode();
  memoizedHashCode = hash;
  return hash;
}

public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition parseFrom(
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
    throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition parseFrom(
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition parseFrom(byte[] data)
    throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition parseFrom(
    byte[] data,
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition parseFrom(
    java.io.InputStream input,
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition parseDelimitedFrom(
    java.io.InputStream input,
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition parseFrom(
    org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition parseFrom(
    org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}

public Builder newBuilderForType() {
  return newBuilder();
}
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
  // The default instance yields a fresh builder; anything else is copied in.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}

/**
 * Condition to check if the value of a given cell (row, family, qualifier)
 * matches a value via a given comparator. Condition is used in check and
 * mutate operations.
 *
 * Protobuf type {@code hbase.pb.Condition}
 */
public static final class Builder extends
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
    // @@protoc_insertion_point(builder_implements:hbase.pb.Condition)
    org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ConditionOrBuilder {
  public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Condition_descriptor;
  }

  protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Condition_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.class,
            org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.Builder.class);
  }
Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getComparatorFieldBuilder(); } } public Builder clear() { super.clear(); row_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); family_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); qualifier_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000004); compareType_ = 0; bitField0_ = (bitField0_ & ~0x00000008); if (comparatorBuilder_ == null) { comparator_ = null; } else { comparatorBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000010); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Condition_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition buildPartial() { 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.row_ = row_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.family_ = family_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.qualifier_ = qualifier_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; } result.compareType_ = compareType_; if (((from_bitField0_ & 0x00000010) == 0x00000010)) { to_bitField0_ |= 0x00000010; } if (comparatorBuilder_ == null) { result.comparator_ = comparator_; } else { result.comparator_ = comparatorBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) 
{ if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.getDefaultInstance()) return this; if (other.hasRow()) { setRow(other.getRow()); } if (other.hasFamily()) { setFamily(other.getFamily()); } if (other.hasQualifier()) { setQualifier(other.getQualifier()); } if (other.hasCompareType()) { setCompareType(other.getCompareType()); } if (other.hasComparator()) { mergeComparator(other.getComparator()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasRow()) { return false; } if (!hasFamily()) { return false; } if (!hasQualifier()) { return false; } if (!hasCompareType()) { return false; } if (!hasComparator()) { return false; } if (!getComparator().isInitialized()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString row_ = 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes row = 1;</code> */ public boolean hasRow() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes row = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getRow() { return row_; } /** * <code>required bytes row = 1;</code> */ public Builder setRow(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; row_ = value; onChanged(); return this; } /** * <code>required bytes row = 1;</code> */ public Builder clearRow() { bitField0_ = (bitField0_ & ~0x00000001); row_ = getDefaultInstance().getRow(); onChanged(); return this; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString family_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes family = 2;</code> */ public boolean hasFamily() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required bytes family = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getFamily() { return family_; } /** * <code>required bytes family = 2;</code> */ public Builder setFamily(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; family_ = value; onChanged(); return this; } /** * <code>required bytes family = 2;</code> */ public Builder clearFamily() { bitField0_ = (bitField0_ & ~0x00000002); family_ = getDefaultInstance().getFamily(); onChanged(); return this; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString qualifier_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes qualifier = 3;</code> */ public boolean hasQualifier() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>required bytes qualifier 
= 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getQualifier() { return qualifier_; } /** * <code>required bytes qualifier = 3;</code> */ public Builder setQualifier(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; qualifier_ = value; onChanged(); return this; } /** * <code>required bytes qualifier = 3;</code> */ public Builder clearQualifier() { bitField0_ = (bitField0_ & ~0x00000004); qualifier_ = getDefaultInstance().getQualifier(); onChanged(); return this; } private int compareType_ = 0; /** * <code>required .hbase.pb.CompareType compare_type = 4;</code> */ public boolean hasCompareType() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>required .hbase.pb.CompareType compare_type = 4;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType getCompareType() { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType result = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType.valueOf(compareType_); return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType.LESS : result; } /** * <code>required .hbase.pb.CompareType compare_type = 4;</code> */ public Builder setCompareType(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000008; compareType_ = value.getNumber(); onChanged(); return this; } /** * <code>required .hbase.pb.CompareType compare_type = 4;</code> */ public Builder clearCompareType() { bitField0_ = (bitField0_ & ~0x00000008); compareType_ = 0; onChanged(); return this; } private org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator comparator_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> comparatorBuilder_; /** * <code>required .hbase.pb.Comparator comparator = 5;</code> */ public boolean hasComparator() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>required .hbase.pb.Comparator comparator = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator getComparator() { if (comparatorBuilder_ == null) { return comparator_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance() : comparator_; } else { return comparatorBuilder_.getMessage(); } } /** * <code>required .hbase.pb.Comparator comparator = 5;</code> */ public Builder setComparator(org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator value) { if (comparatorBuilder_ == null) { if (value == null) { throw new NullPointerException(); } comparator_ = value; onChanged(); } else { comparatorBuilder_.setMessage(value); } bitField0_ |= 0x00000010; return this; } /** * <code>required .hbase.pb.Comparator comparator = 5;</code> */ public Builder setComparator( org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.Builder builderForValue) { if (comparatorBuilder_ == null) { comparator_ = builderForValue.build(); onChanged(); } else { comparatorBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000010; return this; } /** * <code>required .hbase.pb.Comparator comparator = 5;</code> */ public Builder mergeComparator(org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator value) { if (comparatorBuilder_ == null) { if (((bitField0_ & 0x00000010) == 0x00000010) && comparator_ != null && comparator_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance()) { comparator_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.newBuilder(comparator_).mergeFrom(value).buildPartial(); } else { comparator_ = value; } onChanged(); } else { comparatorBuilder_.mergeFrom(value); } bitField0_ |= 0x00000010; return this; } /** * <code>required .hbase.pb.Comparator comparator = 5;</code> */ public Builder clearComparator() { if (comparatorBuilder_ == null) { comparator_ = null; onChanged(); } else { comparatorBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000010); return this; } /** * <code>required .hbase.pb.Comparator comparator = 5;</code> */ public 
org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.Builder getComparatorBuilder() { bitField0_ |= 0x00000010; onChanged(); return getComparatorFieldBuilder().getBuilder(); } /** * <code>required .hbase.pb.Comparator comparator = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() { if (comparatorBuilder_ != null) { return comparatorBuilder_.getMessageOrBuilder(); } else { return comparator_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance() : comparator_; } } /** * <code>required .hbase.pb.Comparator comparator = 5;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> getComparatorFieldBuilder() { if (comparatorBuilder_ == null) { comparatorBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ComparatorOrBuilder>( getComparator(), getParentForChildren(), isClean()); comparator_ = null; } return comparatorBuilder_; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.Condition) } // 
@@protoc_insertion_point(class_scope:hbase.pb.Condition) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Condition> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<Condition>() { public Condition parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new Condition(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Condition> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Condition> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface MutationProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.MutationProto) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>optional bytes row = 1;</code> */ boolean hasRow(); /** * <code>optional bytes row = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getRow(); /** * <code>optional .hbase.pb.MutationProto.MutationType mutate_type = 2;</code> */ boolean hasMutateType(); /** * <code>optional .hbase.pb.MutationProto.MutationType mutate_type = 2;</code> */ 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType getMutateType(); /** * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue> getColumnValueList(); /** * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue getColumnValue(int index); /** * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code> */ int getColumnValueCount(); /** * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code> */ java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder> getColumnValueOrBuilderList(); /** * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder getColumnValueOrBuilder( int index); /** * <code>optional uint64 timestamp = 4;</code> */ boolean hasTimestamp(); /** * <code>optional uint64 timestamp = 4;</code> */ long getTimestamp(); /** * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList(); /** * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index); /** * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code> */ int getAttributeCount(); /** * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code> */ java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getAttributeOrBuilderList(); /** * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( int index); /** * <code>optional .hbase.pb.MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code> */ boolean hasDurability(); /** * <code>optional .hbase.pb.MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Durability getDurability(); /** * <pre> * For some mutations, a result may be returned, in which case, * time range can be specified for potential performance gain * </pre> * * <code>optional .hbase.pb.TimeRange time_range = 7;</code> */ boolean hasTimeRange(); /** * <pre> * For some mutations, a result may be returned, in which case, * time range can be specified for potential performance gain * </pre> * * <code>optional .hbase.pb.TimeRange time_range = 7;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange getTimeRange(); /** * <pre> * For some mutations, a result may be returned, in which case, * time range can be specified for potential performance gain * </pre> * * <code>optional .hbase.pb.TimeRange time_range = 7;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder(); /** * <pre> * The below count is set when the associated cells are NOT * part of this protobuf message; they are passed alongside * and then this Message is a placeholder with metadata. The * count is needed to know how many to peel off the block of Cells as * ours. NOTE: This is different from the pb managed cell_count of the * 'cell' field above which is non-null when the cells are pb'd. 
* </pre> * * <code>optional int32 associated_cell_count = 8;</code> */ boolean hasAssociatedCellCount(); /** * <pre> * The below count is set when the associated cells are NOT * part of this protobuf message; they are passed alongside * and then this Message is a placeholder with metadata. The * count is needed to know how many to peel off the block of Cells as * ours. NOTE: This is different from the pb managed cell_count of the * 'cell' field above which is non-null when the cells are pb'd. * </pre> * * <code>optional int32 associated_cell_count = 8;</code> */ int getAssociatedCellCount(); /** * <code>optional uint64 nonce = 9;</code> */ boolean hasNonce(); /** * <code>optional uint64 nonce = 9;</code> */ long getNonce(); } /** * <pre> ** * A specific mutation inside a mutate request. * It can be an append, increment, put or delete based * on the mutation type. It can be fully filled in or * only metadata present because data is being carried * elsewhere outside of pb. * </pre> * * Protobuf type {@code hbase.pb.MutationProto} */ public static final class MutationProto extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.MutationProto) MutationProtoOrBuilder { // Use MutationProto.newBuilder() to construct. 
// Generated MutationProto constructors (comments only added; regenerate from Client.proto to change).
// - Builder ctor delegates to GeneratedMessageV3.
// - No-arg ctor installs field defaults: empty row bytes, empty repeated lists, zero scalars.
// - Stream-parsing ctor dispatches on wire tags 10(row) / 16(mutate_type) / 26(column_value) /
//   32(timestamp) / 42(attribute) / 48(durability) / 58(time_range) / 64(associated_cell_count) /
//   72(nonce); unrecognized enum numbers are kept as varints in unknownFields rather than dropped,
//   and repeated lists are lazily allocated, tracked via mutable_bitField0_.
private MutationProto(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private MutationProto() { row_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; mutateType_ = 0; columnValue_ = java.util.Collections.emptyList(); timestamp_ = 0L; attribute_ = java.util.Collections.emptyList(); durability_ = 0; associatedCellCount_ = 0; nonce_ = 0L; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private MutationProto( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; row_ = input.readBytes(); break; } case 16: { int rawValue = input.readEnum(); org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType value = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(2, rawValue); } else { bitField0_ |= 0x00000002; mutateType_ = rawValue; } break; } case 26: { if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { columnValue_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue>(); mutable_bitField0_ |= 0x00000004; } columnValue_.add(
input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.PARSER, extensionRegistry)); break; } case 32: { bitField0_ |= 0x00000004; timestamp_ = input.readUInt64(); break; } case 42: { if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) { attribute_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair>(); mutable_bitField0_ |= 0x00000010; } attribute_.add( input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry)); break; } case 48: { int rawValue = input.readEnum(); org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Durability value = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Durability.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(6, rawValue); } else { bitField0_ |= 0x00000008; durability_ = rawValue; } break; } case 58: { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = null; if (((bitField0_ & 0x00000010) == 0x00000010)) { subBuilder = timeRange_.toBuilder(); } timeRange_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(timeRange_); timeRange_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000010; break; } case 64: { bitField0_ |= 0x00000020; associatedCellCount_ = input.readInt32(); break; } case 72: { bitField0_ |= 0x00000040; nonce_ = input.readUInt64(); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { columnValue_ =
// Tail of MutationProto's parsing-ctor finally block: freeze the lazily-built repeated lists
// (columnValue_, attribute_) as unmodifiable and seal unknownFields. Then the descriptor /
// field-accessor-table hooks, and the Durability enum (USE_DEFAULT=0, SKIP_WAL=1, ASYNC_WAL=2,
// SYNC_WAL=3, FSYNC_WAL=4). Generated code -- comments only added here.
java.util.Collections.unmodifiableList(columnValue_); } if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) { attribute_ = java.util.Collections.unmodifiableList(attribute_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Builder.class); } /** * Protobuf enum {@code hbase.pb.MutationProto.Durability} */ public enum Durability implements org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolMessageEnum { /** * <code>USE_DEFAULT = 0;</code> */ USE_DEFAULT(0), /** * <code>SKIP_WAL = 1;</code> */ SKIP_WAL(1), /** * <code>ASYNC_WAL = 2;</code> */ ASYNC_WAL(2), /** * <code>SYNC_WAL = 3;</code> */ SYNC_WAL(3), /** * <code>FSYNC_WAL = 4;</code> */ FSYNC_WAL(4), ; /** * <code>USE_DEFAULT = 0;</code> */ public static final int USE_DEFAULT_VALUE = 0; /** * <code>SKIP_WAL = 1;</code> */ public static final int SKIP_WAL_VALUE = 1; /** * <code>ASYNC_WAL = 2;</code> */ public static final int ASYNC_WAL_VALUE = 2; /** * <code>SYNC_WAL = 3;</code> */ public static final int SYNC_WAL_VALUE = 3; /** * <code>FSYNC_WAL = 4;</code> */ public static final int FSYNC_WAL_VALUE = 4; public final int getNumber() { return value; } /** * @deprecated Use {@link #forNumber(int)} instead.
*/ @java.lang.Deprecated public static Durability valueOf(int value) { return forNumber(value); } public static Durability forNumber(int value) { switch (value) { case 0: return USE_DEFAULT; case 1: return SKIP_WAL; case 2: return ASYNC_WAL; case 3: return SYNC_WAL; case 4: return FSYNC_WAL; default: return null; } } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<Durability> internalGetValueMap() { return internalValueMap; } private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap< Durability> internalValueMap = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<Durability>() { public Durability findValueByNumber(int number) { return Durability.forNumber(number); } }; public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); } public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.getDescriptor().getEnumTypes().get(0); } private static final Durability[] VALUES = values(); public static Durability valueOf( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } return VALUES[desc.getIndex()]; } private final int value; private Durability(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:hbase.pb.MutationProto.Durability) } /** * Protobuf enum {@code hbase.pb.MutationProto.MutationType} */ public enum MutationType implements
// Generated nested enums of MutationProto (comments only added here):
// - MutationType: APPEND=0, INCREMENT=1, PUT=2, DELETE=3 -- forNumber() returns null for unknown
//   numbers (callers preserve them in unknownFields rather than failing).
// - DeleteType: DELETE_ONE_VERSION=0, DELETE_MULTIPLE_VERSIONS=1, DELETE_FAMILY=2,
//   DELETE_FAMILY_VERSION=3.
// Ends with the opening of the ColumnValueOrBuilder interface.
org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolMessageEnum { /** * <code>APPEND = 0;</code> */ APPEND(0), /** * <code>INCREMENT = 1;</code> */ INCREMENT(1), /** * <code>PUT = 2;</code> */ PUT(2), /** * <code>DELETE = 3;</code> */ DELETE(3), ; /** * <code>APPEND = 0;</code> */ public static final int APPEND_VALUE = 0; /** * <code>INCREMENT = 1;</code> */ public static final int INCREMENT_VALUE = 1; /** * <code>PUT = 2;</code> */ public static final int PUT_VALUE = 2; /** * <code>DELETE = 3;</code> */ public static final int DELETE_VALUE = 3; public final int getNumber() { return value; } /** * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static MutationType valueOf(int value) { return forNumber(value); } public static MutationType forNumber(int value) { switch (value) { case 0: return APPEND; case 1: return INCREMENT; case 2: return PUT; case 3: return DELETE; default: return null; } } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<MutationType> internalGetValueMap() { return internalValueMap; } private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap< MutationType> internalValueMap = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<MutationType>() { public MutationType findValueByNumber(int number) { return MutationType.forNumber(number); } }; public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); } public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.getDescriptor().getEnumTypes().get(1); } private static final
MutationType[] VALUES = values(); public static MutationType valueOf( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } return VALUES[desc.getIndex()]; } private final int value; private MutationType(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:hbase.pb.MutationProto.MutationType) } /** * Protobuf enum {@code hbase.pb.MutationProto.DeleteType} */ public enum DeleteType implements org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolMessageEnum { /** * <code>DELETE_ONE_VERSION = 0;</code> */ DELETE_ONE_VERSION(0), /** * <code>DELETE_MULTIPLE_VERSIONS = 1;</code> */ DELETE_MULTIPLE_VERSIONS(1), /** * <code>DELETE_FAMILY = 2;</code> */ DELETE_FAMILY(2), /** * <code>DELETE_FAMILY_VERSION = 3;</code> */ DELETE_FAMILY_VERSION(3), ; /** * <code>DELETE_ONE_VERSION = 0;</code> */ public static final int DELETE_ONE_VERSION_VALUE = 0; /** * <code>DELETE_MULTIPLE_VERSIONS = 1;</code> */ public static final int DELETE_MULTIPLE_VERSIONS_VALUE = 1; /** * <code>DELETE_FAMILY = 2;</code> */ public static final int DELETE_FAMILY_VALUE = 2; /** * <code>DELETE_FAMILY_VERSION = 3;</code> */ public static final int DELETE_FAMILY_VERSION_VALUE = 3; public final int getNumber() { return value; } /** * @deprecated Use {@link #forNumber(int)} instead.
*/ @java.lang.Deprecated public static DeleteType valueOf(int value) { return forNumber(value); } public static DeleteType forNumber(int value) { switch (value) { case 0: return DELETE_ONE_VERSION; case 1: return DELETE_MULTIPLE_VERSIONS; case 2: return DELETE_FAMILY; case 3: return DELETE_FAMILY_VERSION; default: return null; } } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<DeleteType> internalGetValueMap() { return internalValueMap; } private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap< DeleteType> internalValueMap = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<DeleteType>() { public DeleteType findValueByNumber(int number) { return DeleteType.forNumber(number); } }; public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); } public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.getDescriptor().getEnumTypes().get(2); } private static final DeleteType[] VALUES = values(); public static DeleteType valueOf( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } return VALUES[desc.getIndex()]; } private final int value; private DeleteType(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:hbase.pb.MutationProto.DeleteType) } public interface ColumnValueOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.MutationProto.ColumnValue)
// ColumnValueOrBuilder accessor declarations (required bytes family = 1; repeated QualifierValue
// qualifier_value = 2), then the generated ColumnValue message: builder/no-arg constructors, the
// tag-dispatching parse constructor (tag 10 = family bytes, tag 18 = nested QualifierValue message,
// list lazily allocated via mutable_bitField0_), the finally block freezing the repeated list,
// descriptor hooks, and the opening of the nested QualifierValueOrBuilder interface (fields 1-4
// visible here). Generated code -- comments only added.
org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>required bytes family = 1;</code> */ boolean hasFamily(); /** * <code>required bytes family = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getFamily(); /** * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue> getQualifierValueList(); /** * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue getQualifierValue(int index); /** * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> */ int getQualifierValueCount(); /** * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> */ java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder> getQualifierValueOrBuilderList(); /** * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder( int index); } /** * Protobuf type {@code hbase.pb.MutationProto.ColumnValue} */ public static final class ColumnValue extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.MutationProto.ColumnValue) ColumnValueOrBuilder { // Use ColumnValue.newBuilder() to construct.
private ColumnValue(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ColumnValue() { family_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; qualifierValue_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ColumnValue( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; family_ = input.readBytes(); break; } case 18: { if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { qualifierValue_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue>(); mutable_bitField0_ |= 0x00000002; } qualifierValue_.add( input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.PARSER, extensionRegistry)); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ &
0x00000002) == 0x00000002)) { qualifierValue_ = java.util.Collections.unmodifiableList(qualifierValue_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_ColumnValue_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_ColumnValue_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder.class); } public interface QualifierValueOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.MutationProto.ColumnValue.QualifierValue) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>optional bytes qualifier = 1;</code> */ boolean hasQualifier(); /** * <code>optional bytes qualifier = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getQualifier(); /** * <code>optional bytes value = 2;</code> */ boolean hasValue(); /** * <code>optional bytes value = 2;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getValue(); /** * <code>optional uint64 timestamp = 3;</code> */ boolean hasTimestamp(); /** * <code>optional uint64 timestamp = 3;</code> */ long getTimestamp(); /** * <code>optional .hbase.pb.MutationProto.DeleteType delete_type = 4;</code> */ boolean hasDeleteType(); /** * <code>optional .hbase.pb.MutationProto.DeleteType delete_type = 4;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.DeleteType getDeleteType();
// End of QualifierValueOrBuilder (optional bytes tags = 5), then the generated QualifierValue
// message: constructors with field defaults, the parse constructor dispatching on wire tags
// 10(qualifier) / 18(value) / 24(timestamp) / 32(delete_type, unknown enum numbers preserved as
// varints) / 42(tags), descriptor hooks, and the bitField0_-guarded hazzer/getter pairs for
// qualifier, value, timestamp and delete_type. Generated code -- comments only added.
/** * <code>optional bytes tags = 5;</code> */ boolean hasTags(); /** * <code>optional bytes tags = 5;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getTags(); } /** * Protobuf type {@code hbase.pb.MutationProto.ColumnValue.QualifierValue} */ public static final class QualifierValue extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.MutationProto.ColumnValue.QualifierValue) QualifierValueOrBuilder { // Use QualifierValue.newBuilder() to construct. private QualifierValue(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private QualifierValue() { qualifier_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; value_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; timestamp_ = 0L; deleteType_ = 0; tags_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private QualifierValue( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; qualifier_ = input.readBytes(); break; } case 18: { bitField0_ |= 0x00000002; value_ =
input.readBytes(); break; } case 24: { bitField0_ |= 0x00000004; timestamp_ = input.readUInt64(); break; } case 32: { int rawValue = input.readEnum(); org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.DeleteType value = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.DeleteType.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(4, rawValue); } else { bitField0_ |= 0x00000008; deleteType_ = rawValue; } break; } case 42: { bitField0_ |= 0x00000010; tags_ = input.readBytes(); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder.class); } private int bitField0_; public static final int QUALIFIER_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString qualifier_; /** * <code>optional bytes qualifier = 1;</code> */ public boolean
hasQualifier() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional bytes qualifier = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getQualifier() { return qualifier_; } public static final int VALUE_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value_; /** * <code>optional bytes value = 2;</code> */ public boolean hasValue() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional bytes value = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getValue() { return value_; } public static final int TIMESTAMP_FIELD_NUMBER = 3; private long timestamp_; /** * <code>optional uint64 timestamp = 3;</code> */ public boolean hasTimestamp() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional uint64 timestamp = 3;</code> */ public long getTimestamp() { return timestamp_; } public static final int DELETE_TYPE_FIELD_NUMBER = 4; private int deleteType_; /** * <code>optional .hbase.pb.MutationProto.DeleteType delete_type = 4;</code> */ public boolean hasDeleteType() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional .hbase.pb.MutationProto.DeleteType delete_type = 4;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.DeleteType getDeleteType() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.DeleteType result = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.DeleteType.valueOf(deleteType_); return result == null ?
// QualifierValue serialization and value-semantics methods (generated; comments only added):
// getDeleteType() falls back to DELETE_ONE_VERSION when the stored number is unrecognized;
// tags accessor pair; isInitialized() memoized in a byte (all fields optional, so always true);
// writeTo()/getSerializedSize() emit only the bitField0_-present fields 1-5 plus unknownFields;
// equals()/hashCode() compare field presence and values (hashCode memoized), using the raw
// deleteType_ number; then the static parseFrom(ByteString/byte[]) overloads delegating to PARSER.
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.DeleteType.DELETE_ONE_VERSION : result; } public static final int TAGS_FIELD_NUMBER = 5; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString tags_; /** * <code>optional bytes tags = 5;</code> */ public boolean hasTags() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>optional bytes tags = 5;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getTags() { return tags_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, qualifier_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, value_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeUInt64(3, timestamp_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeEnum(4, deleteType_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeBytes(5, tags_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(1, qualifier_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(2, value_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt64Size(3, timestamp_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size +=
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeEnumSize(4, deleteType_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(5, tags_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue) obj; boolean result = true; result = result && (hasQualifier() == other.hasQualifier()); if (hasQualifier()) { result = result && getQualifier() .equals(other.getQualifier()); } result = result && (hasValue() == other.hasValue()); if (hasValue()) { result = result && getValue() .equals(other.getValue()); } result = result && (hasTimestamp() == other.hasTimestamp()); if (hasTimestamp()) { result = result && (getTimestamp() == other.getTimestamp()); } result = result && (hasDeleteType() == other.hasDeleteType()); if (hasDeleteType()) { result = result && deleteType_ == other.deleteType_; } result = result && (hasTags() == other.hasTags()); if (hasTags()) { result = result && getTags() .equals(other.getTags()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasQualifier()) { hash = (37 * hash) + QUALIFIER_FIELD_NUMBER; hash = (53 * hash) + getQualifier().hashCode(); } if (hasValue()) { hash = (37 * hash) +
VALUE_FIELD_NUMBER; hash = (53 * hash) + getValue().hashCode(); } if (hasTimestamp()) { hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( getTimestamp()); } if (hasDeleteType()) { hash = (37 * hash) + DELETE_TYPE_FIELD_NUMBER; hash = (53 * hash) + deleteType_; } if (hasTags()) { hash = (37 * hash) + TAGS_FIELD_NUMBER; hash = (53 * hash) + getTags().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static
// QualifierValue stream-based parseFrom overloads (InputStream / CodedInputStream, plain and
// delimited, with and without an extension registry) delegating to GeneratedMessageV3 helpers;
// the newBuilder/toBuilder plumbing rooted at DEFAULT_INSTANCE; and the opening of the generated
// QualifierValue.Builder (descriptor hooks, constructors, clear() resetting all five fields and
// their bitField0_ presence bits). The Builder continues past this chunk. Generated code --
// comments only added here.
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream
input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.MutationProto.ColumnValue.QualifierValue} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.MutationProto.ColumnValue.QualifierValue) org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_fieldAccessorTable .ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); qualifier_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); value_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); timestamp_ = 0L; bitField0_ = (bitField0_ & ~0x00000004); deleteType_ = 0; bitField0_ = (bitField0_ & ~0x00000008); tags_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000010); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue build() {
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.qualifier_ = qualifier_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.value_ = value_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.timestamp_ = timestamp_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; } result.deleteType_ = deleteType_; if (((from_bitField0_ & 0x00000010) == 0x00000010)) { to_bitField0_ |= 0x00000010; } result.tags_ = tags_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) 
super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.getDefaultInstance()) return this; if (other.hasQualifier()) { setQualifier(other.getQualifier()); } if (other.hasValue()) { setValue(other.getValue()); } if (other.hasTimestamp()) { setTimestamp(other.getTimestamp()); } if (other.hasDeleteType()) { setDeleteType(other.getDeleteType()); } if (other.hasTags()) { setTags(other.getTags()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = 
(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString qualifier_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>optional bytes qualifier = 1;</code> */ public boolean hasQualifier() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional bytes qualifier = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getQualifier() { return qualifier_; } /** * <code>optional bytes qualifier = 1;</code> */ public Builder setQualifier(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; qualifier_ = value; onChanged(); return this; } /** * <code>optional bytes qualifier = 1;</code> */ public Builder clearQualifier() { bitField0_ = (bitField0_ & ~0x00000001); qualifier_ = getDefaultInstance().getQualifier(); onChanged(); return this; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>optional bytes value = 2;</code> */ public boolean hasValue() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional bytes value = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getValue() { return value_; } /** * <code>optional bytes value = 2;</code> */ public Builder setValue(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; value_ = value; onChanged(); return this; } /** * <code>optional bytes value = 2;</code> */ public Builder clearValue() { bitField0_ = 
(bitField0_ & ~0x00000002); value_ = getDefaultInstance().getValue(); onChanged(); return this; } private long timestamp_ ; /** * <code>optional uint64 timestamp = 3;</code> */ public boolean hasTimestamp() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional uint64 timestamp = 3;</code> */ public long getTimestamp() { return timestamp_; } /** * <code>optional uint64 timestamp = 3;</code> */ public Builder setTimestamp(long value) { bitField0_ |= 0x00000004; timestamp_ = value; onChanged(); return this; } /** * <code>optional uint64 timestamp = 3;</code> */ public Builder clearTimestamp() { bitField0_ = (bitField0_ & ~0x00000004); timestamp_ = 0L; onChanged(); return this; } private int deleteType_ = 0; /** * <code>optional .hbase.pb.MutationProto.DeleteType delete_type = 4;</code> */ public boolean hasDeleteType() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional .hbase.pb.MutationProto.DeleteType delete_type = 4;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.DeleteType getDeleteType() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.DeleteType result = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.DeleteType.valueOf(deleteType_); return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.DeleteType.DELETE_ONE_VERSION : result; } /** * <code>optional .hbase.pb.MutationProto.DeleteType delete_type = 4;</code> */ public Builder setDeleteType(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.DeleteType value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000008; deleteType_ = value.getNumber(); onChanged(); return this; } /** * <code>optional .hbase.pb.MutationProto.DeleteType delete_type = 4;</code> */ public Builder clearDeleteType() { bitField0_ = (bitField0_ & ~0x00000008); deleteType_ = 0; onChanged(); return this; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString tags_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>optional bytes tags = 5;</code> */ public boolean hasTags() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>optional bytes tags = 5;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getTags() { return tags_; } /** * <code>optional bytes tags = 5;</code> */ public Builder setTags(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000010; tags_ = value; onChanged(); return this; } /** * <code>optional bytes tags = 5;</code> */ public Builder clearTags() { bitField0_ = (bitField0_ & ~0x00000010); tags_ = getDefaultInstance().getTags(); onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.MutationProto.ColumnValue.QualifierValue) } 
// @@protoc_insertion_point(class_scope:hbase.pb.MutationProto.ColumnValue.QualifierValue) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<QualifierValue> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<QualifierValue>() { public QualifierValue parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new QualifierValue(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<QualifierValue> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<QualifierValue> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } private int bitField0_; public static final int FAMILY_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString family_; /** * <code>required bytes family = 1;</code> */ public boolean hasFamily() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes family = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getFamily() { return family_; } public static final int 
QUALIFIER_VALUE_FIELD_NUMBER = 2; private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue> qualifierValue_; /** * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue> getQualifierValueList() { return qualifierValue_; } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder> getQualifierValueOrBuilderList() { return qualifierValue_; } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> */ public int getQualifierValueCount() { return qualifierValue_.size(); } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue getQualifierValue(int index) { return qualifierValue_.get(index); } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder( int index) { return qualifierValue_.get(index); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasFamily()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 
0x00000001)) { output.writeBytes(1, family_); } for (int i = 0; i < qualifierValue_.size(); i++) { output.writeMessage(2, qualifierValue_.get(i)); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(1, family_); } for (int i = 0; i < qualifierValue_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(2, qualifierValue_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue) obj; boolean result = true; result = result && (hasFamily() == other.hasFamily()); if (hasFamily()) { result = result && getFamily() .equals(other.getFamily()); } result = result && getQualifierValueList() .equals(other.getQualifierValueList()); result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasFamily()) { hash = (37 * hash) + FAMILY_FIELD_NUMBER; hash = (53 * hash) + getFamily().hashCode(); } if (getQualifierValueCount() > 0) { hash = (37 * hash) + QUALIFIER_VALUE_FIELD_NUMBER; hash = (53 * hash) + getQualifierValueList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; 
return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 
.parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.MutationProto.ColumnValue} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.MutationProto.ColumnValue) org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_ColumnValue_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_ColumnValue_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getQualifierValueFieldBuilder(); } } public Builder clear() { super.clear(); family_ = 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); if (qualifierValueBuilder_ == null) { qualifierValue_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); } else { qualifierValueBuilder_.clear(); } return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_ColumnValue_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.family_ = family_; if (qualifierValueBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002)) { qualifierValue_ = java.util.Collections.unmodifiableList(qualifierValue_); bitField0_ = (bitField0_ & ~0x00000002); } result.qualifierValue_ = qualifierValue_; } else { result.qualifierValue_ = qualifierValueBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return 
(Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.getDefaultInstance()) return this; if (other.hasFamily()) { setFamily(other.getFamily()); } if (qualifierValueBuilder_ == null) { if (!other.qualifierValue_.isEmpty()) { if (qualifierValue_.isEmpty()) { qualifierValue_ = other.qualifierValue_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureQualifierValueIsMutable(); qualifierValue_.addAll(other.qualifierValue_); } onChanged(); } } else { if (!other.qualifierValue_.isEmpty()) { if (qualifierValueBuilder_.isEmpty()) { 
qualifierValueBuilder_.dispose(); qualifierValueBuilder_ = null; qualifierValue_ = other.qualifierValue_; bitField0_ = (bitField0_ & ~0x00000002); qualifierValueBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getQualifierValueFieldBuilder() : null; } else { qualifierValueBuilder_.addAllMessages(other.qualifierValue_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasFamily()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString family_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes family = 1;</code> */ public boolean hasFamily() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes family = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getFamily() { return family_; } /** * <code>required bytes family = 1;</code> */ public Builder setFamily(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; family_ = value; 
onChanged(); return this; } /** * <code>required bytes family = 1;</code> */ public Builder clearFamily() { bitField0_ = (bitField0_ & ~0x00000001); family_ = getDefaultInstance().getFamily(); onChanged(); return this; } private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue> qualifierValue_ = java.util.Collections.emptyList(); private void ensureQualifierValueIsMutable() { if (!((bitField0_ & 0x00000002) == 0x00000002)) { qualifierValue_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue>(qualifierValue_); bitField0_ |= 0x00000002; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder> qualifierValueBuilder_; /** * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue> getQualifierValueList() { if (qualifierValueBuilder_ == null) { return java.util.Collections.unmodifiableList(qualifierValue_); } else { return qualifierValueBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> */ public int getQualifierValueCount() { if (qualifierValueBuilder_ == null) { return qualifierValue_.size(); } else { return qualifierValueBuilder_.getCount(); } } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> */ public 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue getQualifierValue(int index) { if (qualifierValueBuilder_ == null) { return qualifierValue_.get(index); } else { return qualifierValueBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> */ public Builder setQualifierValue( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue value) { if (qualifierValueBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureQualifierValueIsMutable(); qualifierValue_.set(index, value); onChanged(); } else { qualifierValueBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> */ public Builder setQualifierValue( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder builderForValue) { if (qualifierValueBuilder_ == null) { ensureQualifierValueIsMutable(); qualifierValue_.set(index, builderForValue.build()); onChanged(); } else { qualifierValueBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> */ public Builder addQualifierValue(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue value) { if (qualifierValueBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureQualifierValueIsMutable(); qualifierValue_.add(value); onChanged(); } else { qualifierValueBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> */ public Builder addQualifierValue( int index, 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue value) { if (qualifierValueBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureQualifierValueIsMutable(); qualifierValue_.add(index, value); onChanged(); } else { qualifierValueBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> */ public Builder addQualifierValue( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder builderForValue) { if (qualifierValueBuilder_ == null) { ensureQualifierValueIsMutable(); qualifierValue_.add(builderForValue.build()); onChanged(); } else { qualifierValueBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> */ public Builder addQualifierValue( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder builderForValue) { if (qualifierValueBuilder_ == null) { ensureQualifierValueIsMutable(); qualifierValue_.add(index, builderForValue.build()); onChanged(); } else { qualifierValueBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> */ public Builder addAllQualifierValue( java.lang.Iterable<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue> values) { if (qualifierValueBuilder_ == null) { ensureQualifierValueIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, qualifierValue_); onChanged(); } else { qualifierValueBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> */ public Builder clearQualifierValue() { if (qualifierValueBuilder_ == null) { qualifierValue_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { qualifierValueBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> */ public Builder removeQualifierValue(int index) { if (qualifierValueBuilder_ == null) { ensureQualifierValueIsMutable(); qualifierValue_.remove(index); onChanged(); } else { qualifierValueBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder getQualifierValueBuilder( int index) { return getQualifierValueFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder( int index) { if (qualifierValueBuilder_ == null) { return qualifierValue_.get(index); } else { return qualifierValueBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder> getQualifierValueOrBuilderList() { if (qualifierValueBuilder_ != null) { return qualifierValueBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(qualifierValue_); } } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder addQualifierValueBuilder() { return getQualifierValueFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.getDefaultInstance()); } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder addQualifierValueBuilder( int index) { return getQualifierValueFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.getDefaultInstance()); } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder> getQualifierValueBuilderList() { return getQualifierValueFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder> getQualifierValueFieldBuilder() { if 
(qualifierValueBuilder_ == null) { qualifierValueBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder>( qualifierValue_, ((bitField0_ & 0x00000002) == 0x00000002), getParentForChildren(), isClean()); qualifierValue_ = null; } return qualifierValueBuilder_; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.MutationProto.ColumnValue) } // @@protoc_insertion_point(class_scope:hbase.pb.MutationProto.ColumnValue) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ColumnValue> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<ColumnValue>() { public ColumnValue parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new ColumnValue(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ColumnValue> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ColumnValue> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } private int bitField0_; public static final int ROW_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString row_; /** * <code>optional bytes row = 1;</code> */ public boolean hasRow() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional bytes row = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getRow() { return row_; } public static final int MUTATE_TYPE_FIELD_NUMBER = 2; private int mutateType_; /** * <code>optional .hbase.pb.MutationProto.MutationType mutate_type = 2;</code> */ public boolean hasMutateType() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional .hbase.pb.MutationProto.MutationType mutate_type = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType getMutateType() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType result = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType.valueOf(mutateType_); return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType.APPEND : result; } public static final int COLUMN_VALUE_FIELD_NUMBER = 3; private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue> columnValue_; /** * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue> getColumnValueList() { return columnValue_; } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder> getColumnValueOrBuilderList() { return columnValue_; } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code> */ public int getColumnValueCount() { return columnValue_.size(); } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue getColumnValue(int index) { return columnValue_.get(index); } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder getColumnValueOrBuilder( int index) { return columnValue_.get(index); } public static final int TIMESTAMP_FIELD_NUMBER = 4; private long timestamp_; /** * <code>optional uint64 timestamp = 4;</code> */ public boolean hasTimestamp() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional uint64 timestamp = 4;</code> */ public long getTimestamp() { return timestamp_; } public static final int ATTRIBUTE_FIELD_NUMBER = 5; private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair> attribute_; /** * <code>repeated .hbase.pb.NameBytesPair attribute = 
5;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList() { return attribute_; } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getAttributeOrBuilderList() { return attribute_; } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code> */ public int getAttributeCount() { return attribute_.size(); } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) { return attribute_.get(index); } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( int index) { return attribute_.get(index); } public static final int DURABILITY_FIELD_NUMBER = 6; private int durability_; /** * <code>optional .hbase.pb.MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code> */ public boolean hasDurability() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional .hbase.pb.MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Durability getDurability() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Durability result = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Durability.valueOf(durability_); return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Durability.USE_DEFAULT : result; } public static final int TIME_RANGE_FIELD_NUMBER = 7; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange timeRange_; /** * <pre> * For some mutations, a result may be returned, in which case, * time range can be specified for potential performance gain * </pre> * * <code>optional .hbase.pb.TimeRange time_range = 7;</code> */ public boolean hasTimeRange() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <pre> * For some mutations, a result may be returned, in which case, * time range can be specified for potential performance gain * </pre> * * <code>optional .hbase.pb.TimeRange time_range = 7;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { return timeRange_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance() : timeRange_; } /** * <pre> * For some mutations, a result may be returned, in which case, * time range can be specified for potential performance gain * </pre> * * <code>optional .hbase.pb.TimeRange time_range = 7;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { return timeRange_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance() : timeRange_; } public static final int ASSOCIATED_CELL_COUNT_FIELD_NUMBER = 8; private int associatedCellCount_; /** * <pre> * The below count is set when the associated cells are NOT * part of this protobuf message; they are passed alongside * and then this Message is a placeholder with metadata. The * count is needed to know how many to peel off the block of Cells as * ours. NOTE: This is different from the pb managed cell_count of the * 'cell' field above which is non-null when the cells are pb'd. 
* </pre> * * <code>optional int32 associated_cell_count = 8;</code> */ public boolean hasAssociatedCellCount() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <pre> * The below count is set when the associated cells are NOT * part of this protobuf message; they are passed alongside * and then this Message is a placeholder with metadata. The * count is needed to know how many to peel off the block of Cells as * ours. NOTE: This is different from the pb managed cell_count of the * 'cell' field above which is non-null when the cells are pb'd. * </pre> * * <code>optional int32 associated_cell_count = 8;</code> */ public int getAssociatedCellCount() { return associatedCellCount_; } public static final int NONCE_FIELD_NUMBER = 9; private long nonce_; /** * <code>optional uint64 nonce = 9;</code> */ public boolean hasNonce() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** * <code>optional uint64 nonce = 9;</code> */ public long getNonce() { return nonce_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getColumnValueCount(); i++) { if (!getColumnValue(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } for (int i = 0; i < getAttributeCount(); i++) { if (!getAttribute(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, row_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeEnum(2, mutateType_); } for (int i = 0; i < columnValue_.size(); i++) { output.writeMessage(3, columnValue_.get(i)); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeUInt64(4, timestamp_); } for (int i = 0; i < 
attribute_.size(); i++) { output.writeMessage(5, attribute_.get(i)); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeEnum(6, durability_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeMessage(7, getTimeRange()); } if (((bitField0_ & 0x00000020) == 0x00000020)) { output.writeInt32(8, associatedCellCount_); } if (((bitField0_ & 0x00000040) == 0x00000040)) { output.writeUInt64(9, nonce_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(1, row_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeEnumSize(2, mutateType_); } for (int i = 0; i < columnValue_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(3, columnValue_.get(i)); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt64Size(4, timestamp_); } for (int i = 0; i < attribute_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(5, attribute_.get(i)); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeEnumSize(6, durability_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(7, getTimeRange()); } if (((bitField0_ & 0x00000020) == 0x00000020)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeInt32Size(8, associatedCellCount_); } if (((bitField0_ & 0x00000040) == 0x00000040)) { size += 
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt64Size(9, nonce_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto) obj; boolean result = true; result = result && (hasRow() == other.hasRow()); if (hasRow()) { result = result && getRow() .equals(other.getRow()); } result = result && (hasMutateType() == other.hasMutateType()); if (hasMutateType()) { result = result && mutateType_ == other.mutateType_; } result = result && getColumnValueList() .equals(other.getColumnValueList()); result = result && (hasTimestamp() == other.hasTimestamp()); if (hasTimestamp()) { result = result && (getTimestamp() == other.getTimestamp()); } result = result && getAttributeList() .equals(other.getAttributeList()); result = result && (hasDurability() == other.hasDurability()); if (hasDurability()) { result = result && durability_ == other.durability_; } result = result && (hasTimeRange() == other.hasTimeRange()); if (hasTimeRange()) { result = result && getTimeRange() .equals(other.getTimeRange()); } result = result && (hasAssociatedCellCount() == other.hasAssociatedCellCount()); if (hasAssociatedCellCount()) { result = result && (getAssociatedCellCount() == other.getAssociatedCellCount()); } result = result && (hasNonce() == other.hasNonce()); if (hasNonce()) { result = result && (getNonce() == other.getNonce()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return 
// NOTE(review): protoc-generated code from Client.proto — DO NOT hand-edit; regenerate with protoc instead.
// Tail of hashCode(): folds each present field into the hash (53/37 multipliers per field, longs via Internal.hashLong) and memoizes the result.
memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasRow()) { hash = (37 * hash) + ROW_FIELD_NUMBER; hash = (53 * hash) + getRow().hashCode(); } if (hasMutateType()) { hash = (37 * hash) + MUTATE_TYPE_FIELD_NUMBER; hash = (53 * hash) + mutateType_; } if (getColumnValueCount() > 0) { hash = (37 * hash) + COLUMN_VALUE_FIELD_NUMBER; hash = (53 * hash) + getColumnValueList().hashCode(); } if (hasTimestamp()) { hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( getTimestamp()); } if (getAttributeCount() > 0) { hash = (37 * hash) + ATTRIBUTE_FIELD_NUMBER; hash = (53 * hash) + getAttributeList().hashCode(); } if (hasDurability()) { hash = (37 * hash) + DURABILITY_FIELD_NUMBER; hash = (53 * hash) + durability_; } if (hasTimeRange()) { hash = (37 * hash) + TIME_RANGE_FIELD_NUMBER; hash = (53 * hash) + getTimeRange().hashCode(); } if (hasAssociatedCellCount()) { hash = (37 * hash) + ASSOCIATED_CELL_COUNT_FIELD_NUMBER; hash = (53 * hash) + getAssociatedCellCount(); } if (hasNonce()) { hash = (37 * hash) + NONCE_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( getNonce()); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return
// byte[] / InputStream / delimited parseFrom overloads — all delegate to PARSER or the GeneratedMessageV3 IO helpers.
PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public
// CodedInputStream parseFrom overloads, then the builder factories: newBuilder()/toBuilder() route through DEFAULT_INSTANCE.
static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> ** * A specific mutation inside a mutate request. * It can be an append, increment, put or delete based * on the mutation type. It can be fully filled in or * only metadata present because data is being carried * elsewhere outside of pb.
* </pre> * * Protobuf type {@code hbase.pb.MutationProto} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.MutationProto) org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProtoOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getColumnValueFieldBuilder(); getAttributeFieldBuilder(); getTimeRangeFieldBuilder(); } } public Builder clear() { super.clear(); row_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); mutateType_ = 0; bitField0_ = (bitField0_ & ~0x00000002); if (columnValueBuilder_ == null) { columnValue_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); } else {
// NOTE(review): protoc-generated Builder for hbase.pb.MutationProto — do not hand-edit.
// clear() continues: resets every field to its default and drops its bitField0_ presence bit.
columnValueBuilder_.clear(); } timestamp_ = 0L; bitField0_ = (bitField0_ & ~0x00000008); if (attributeBuilder_ == null) { attribute_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000010); } else { attributeBuilder_.clear(); } durability_ = 0; bitField0_ = (bitField0_ & ~0x00000020); if (timeRangeBuilder_ == null) { timeRange_ = null; } else { timeRangeBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000040); associatedCellCount_ = 0; bitField0_ = (bitField0_ & ~0x00000080); nonce_ = 0L; bitField0_ = (bitField0_ & ~0x00000100); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutationProto_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.row_ = row_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.mutateType_ = mutateType_; if (columnValueBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004)) { columnValue_ =
// buildPartial() continues: repeated fields are frozen via unmodifiableList (or built from their field builders);
// builder presence bits are remapped to the message's (denser) bit layout before assigning result.bitField0_.
java.util.Collections.unmodifiableList(columnValue_); bitField0_ = (bitField0_ & ~0x00000004); } result.columnValue_ = columnValue_; } else { result.columnValue_ = columnValueBuilder_.build(); } if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000004; } result.timestamp_ = timestamp_; if (attributeBuilder_ == null) { if (((bitField0_ & 0x00000010) == 0x00000010)) { attribute_ = java.util.Collections.unmodifiableList(attribute_); bitField0_ = (bitField0_ & ~0x00000010); } result.attribute_ = attribute_; } else { result.attribute_ = attributeBuilder_.build(); } if (((from_bitField0_ & 0x00000020) == 0x00000020)) { to_bitField0_ |= 0x00000008; } result.durability_ = durability_; if (((from_bitField0_ & 0x00000040) == 0x00000040)) { to_bitField0_ |= 0x00000010; } if (timeRangeBuilder_ == null) { result.timeRange_ = timeRange_; } else { result.timeRange_ = timeRangeBuilder_.build(); } if (((from_bitField0_ & 0x00000080) == 0x00000080)) { to_bitField0_ |= 0x00000020; } result.associatedCellCount_ = associatedCellCount_; if (((from_bitField0_ & 0x00000100) == 0x00000100)) { to_bitField0_ |= 0x00000040; } result.nonce_ = nonce_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public
// Reflection plumbing (addRepeatedField) and the typed mergeFrom(): copies only fields the other message has set.
Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance()) return this; if (other.hasRow()) { setRow(other.getRow()); } if (other.hasMutateType()) { setMutateType(other.getMutateType()); } if (columnValueBuilder_ == null) { if (!other.columnValue_.isEmpty()) { if (columnValue_.isEmpty()) { columnValue_ = other.columnValue_; bitField0_ = (bitField0_ & ~0x00000004); } else { ensureColumnValueIsMutable(); columnValue_.addAll(other.columnValue_); } onChanged(); } } else { if (!other.columnValue_.isEmpty()) { if (columnValueBuilder_.isEmpty()) { columnValueBuilder_.dispose(); columnValueBuilder_ = null; columnValue_ = other.columnValue_; bitField0_ = (bitField0_ & ~0x00000004); columnValueBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
// mergeFrom() continues: same empty/non-empty merge dance for attribute, then scalar fields, unknown fields,
// isInitialized() (checks nested column_value/attribute messages), and the CodedInputStream mergeFrom.
getColumnValueFieldBuilder() : null; } else { columnValueBuilder_.addAllMessages(other.columnValue_); } } } if (other.hasTimestamp()) { setTimestamp(other.getTimestamp()); } if (attributeBuilder_ == null) { if (!other.attribute_.isEmpty()) { if (attribute_.isEmpty()) { attribute_ = other.attribute_; bitField0_ = (bitField0_ & ~0x00000010); } else { ensureAttributeIsMutable(); attribute_.addAll(other.attribute_); } onChanged(); } } else { if (!other.attribute_.isEmpty()) { if (attributeBuilder_.isEmpty()) { attributeBuilder_.dispose(); attributeBuilder_ = null; attribute_ = other.attribute_; bitField0_ = (bitField0_ & ~0x00000010); attributeBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getAttributeFieldBuilder() : null; } else { attributeBuilder_.addAllMessages(other.attribute_); } } } if (other.hasDurability()) { setDurability(other.getDurability()); } if (other.hasTimeRange()) { mergeTimeRange(other.getTimeRange()); } if (other.hasAssociatedCellCount()) { setAssociatedCellCount(other.getAssociatedCellCount()); } if (other.hasNonce()) { setNonce(other.getNonce()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getColumnValueCount(); i++) { if (!getColumnValue(i).isInitialized()) { return false; } } for (int i = 0; i < getAttributeCount(); i++) { if (!getAttribute(i).isInitialized()) { return false; } } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage =
// Stream mergeFrom() tail (merges whatever parsed before the error), then Builder field state:
// bitField0_ presence bits, optional bytes row (bit 0x1) and optional enum mutate_type (bit 0x2) accessors.
(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString row_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>optional bytes row = 1;</code> */ public boolean hasRow() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional bytes row = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getRow() { return row_; } /** * <code>optional bytes row = 1;</code> */ public Builder setRow(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; row_ = value; onChanged(); return this; } /** * <code>optional bytes row = 1;</code> */ public Builder clearRow() { bitField0_ = (bitField0_ & ~0x00000001); row_ = getDefaultInstance().getRow(); onChanged(); return this; } private int mutateType_ = 0; /** * <code>optional .hbase.pb.MutationProto.MutationType mutate_type = 2;</code> */ public boolean hasMutateType() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional .hbase.pb.MutationProto.MutationType mutate_type = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType getMutateType() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType result = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType.valueOf(mutateType_); return result == null ?
// mutate_type accessors conclude (unrecognized enum numbers fall back to APPEND);
// repeated column_value storage follows: a plain list until getColumnValueFieldBuilder() is first used.
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType.APPEND : result; } /** * <code>optional .hbase.pb.MutationProto.MutationType mutate_type = 2;</code> */ public Builder setMutateType(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; mutateType_ = value.getNumber(); onChanged(); return this; } /** * <code>optional .hbase.pb.MutationProto.MutationType mutate_type = 2;</code> */ public Builder clearMutateType() { bitField0_ = (bitField0_ & ~0x00000002); mutateType_ = 0; onChanged(); return this; } private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue> columnValue_ = java.util.Collections.emptyList(); private void ensureColumnValueIsMutable() { if (!((bitField0_ & 0x00000004) == 0x00000004)) { columnValue_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue>(columnValue_); bitField0_ |= 0x00000004; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder> columnValueBuilder_; /** * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue> getColumnValueList() { if (columnValueBuilder_ == null) { return java.util.Collections.unmodifiableList(columnValue_); } else { return columnValueBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code> */ public int getColumnValueCount() { if
// column_value get/set/add accessors — each branches on whether the field builder has been materialized.
(columnValueBuilder_ == null) { return columnValue_.size(); } else { return columnValueBuilder_.getCount(); } } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue getColumnValue(int index) { if (columnValueBuilder_ == null) { return columnValue_.get(index); } else { return columnValueBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code> */ public Builder setColumnValue( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue value) { if (columnValueBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureColumnValueIsMutable(); columnValue_.set(index, value); onChanged(); } else { columnValueBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code> */ public Builder setColumnValue( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder builderForValue) { if (columnValueBuilder_ == null) { ensureColumnValueIsMutable(); columnValue_.set(index, builderForValue.build()); onChanged(); } else { columnValueBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code> */ public Builder addColumnValue(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue value) { if (columnValueBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureColumnValueIsMutable(); columnValue_.add(value); onChanged(); } else { columnValueBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code> */ public Builder addColumnValue( int index,
// indexed add and builder-valued add variants for column_value.
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue value) { if (columnValueBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureColumnValueIsMutable(); columnValue_.add(index, value); onChanged(); } else { columnValueBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code> */ public Builder addColumnValue( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder builderForValue) { if (columnValueBuilder_ == null) { ensureColumnValueIsMutable(); columnValue_.add(builderForValue.build()); onChanged(); } else { columnValueBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code> */ public Builder addColumnValue( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder builderForValue) { if (columnValueBuilder_ == null) { ensureColumnValueIsMutable(); columnValue_.add(index, builderForValue.build()); onChanged(); } else { columnValueBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code> */ public Builder addAllColumnValue( java.lang.Iterable<?
// addAll/clear/remove plus the builder-view accessors for column_value.
extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue> values) { if (columnValueBuilder_ == null) { ensureColumnValueIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, columnValue_); onChanged(); } else { columnValueBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code> */ public Builder clearColumnValue() { if (columnValueBuilder_ == null) { columnValue_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); } else { columnValueBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code> */ public Builder removeColumnValue(int index) { if (columnValueBuilder_ == null) { ensureColumnValueIsMutable(); columnValue_.remove(index); onChanged(); } else { columnValueBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder getColumnValueBuilder( int index) { return getColumnValueFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder getColumnValueOrBuilder( int index) { if (columnValueBuilder_ == null) { return columnValue_.get(index); } else { return columnValueBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code> */ public java.util.List<?
// column_value builder-list accessors and lazy construction of its RepeatedFieldBuilderV3.
extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder> getColumnValueOrBuilderList() { if (columnValueBuilder_ != null) { return columnValueBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(columnValue_); } } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder addColumnValueBuilder() { return getColumnValueFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.getDefaultInstance()); } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder addColumnValueBuilder( int index) { return getColumnValueFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.getDefaultInstance()); } /** * <code>repeated .hbase.pb.MutationProto.ColumnValue column_value = 3;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder> getColumnValueBuilderList() { return getColumnValueFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder> getColumnValueFieldBuilder() { if (columnValueBuilder_ == null) { columnValueBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue,
// RepeatedFieldBuilderV3 wiring for column_value concludes (the plain list is handed off and nulled);
// then optional uint64 timestamp (bit 0x8) accessors and repeated attribute (NameBytesPair) storage.
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder>( columnValue_, ((bitField0_ & 0x00000004) == 0x00000004), getParentForChildren(), isClean()); columnValue_ = null; } return columnValueBuilder_; } private long timestamp_ ; /** * <code>optional uint64 timestamp = 4;</code> */ public boolean hasTimestamp() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional uint64 timestamp = 4;</code> */ public long getTimestamp() { return timestamp_; } /** * <code>optional uint64 timestamp = 4;</code> */ public Builder setTimestamp(long value) { bitField0_ |= 0x00000008; timestamp_ = value; onChanged(); return this; } /** * <code>optional uint64 timestamp = 4;</code> */ public Builder clearTimestamp() { bitField0_ = (bitField0_ & ~0x00000008); timestamp_ = 0L; onChanged(); return this; } private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair> attribute_ = java.util.Collections.emptyList(); private void ensureAttributeIsMutable() { if (!((bitField0_ & 0x00000010) == 0x00000010)) { attribute_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair>(attribute_); bitField0_ |= 0x00000010; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> attributeBuilder_; /** * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList() { if (attributeBuilder_ == null) { return java.util.Collections.unmodifiableList(attribute_); } else { return
// attribute get/set/add accessors — mirror the column_value pattern, branching on attributeBuilder_.
attributeBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code> */ public int getAttributeCount() { if (attributeBuilder_ == null) { return attribute_.size(); } else { return attributeBuilder_.getCount(); } } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) { if (attributeBuilder_ == null) { return attribute_.get(index); } else { return attributeBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code> */ public Builder setAttribute( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair value) { if (attributeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAttributeIsMutable(); attribute_.set(index, value); onChanged(); } else { attributeBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code> */ public Builder setAttribute( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { if (attributeBuilder_ == null) { ensureAttributeIsMutable(); attribute_.set(index, builderForValue.build()); onChanged(); } else { attributeBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code> */ public Builder addAttribute(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair value) { if (attributeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAttributeIsMutable(); attribute_.add(value); onChanged(); } else { attributeBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code> */ public Builder addAttribute( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair value) { if
// indexed add and builder-valued add variants for attribute.
(attributeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAttributeIsMutable(); attribute_.add(index, value); onChanged(); } else { attributeBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code> */ public Builder addAttribute( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { if (attributeBuilder_ == null) { ensureAttributeIsMutable(); attribute_.add(builderForValue.build()); onChanged(); } else { attributeBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code> */ public Builder addAttribute( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { if (attributeBuilder_ == null) { ensureAttributeIsMutable(); attribute_.add(index, builderForValue.build()); onChanged(); } else { attributeBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code> */ public Builder addAllAttribute( java.lang.Iterable<?
// addAll/clear/remove plus the builder-view accessors for attribute.
extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair> values) { if (attributeBuilder_ == null) { ensureAttributeIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, attribute_); onChanged(); } else { attributeBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code> */ public Builder clearAttribute() { if (attributeBuilder_ == null) { attribute_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000010); onChanged(); } else { attributeBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code> */ public Builder removeAttribute(int index) { if (attributeBuilder_ == null) { ensureAttributeIsMutable(); attribute_.remove(index); onChanged(); } else { attributeBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder getAttributeBuilder( int index) { return getAttributeFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( int index) { if (attributeBuilder_ == null) { return attribute_.get(index); } else { return attributeBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code> */ public java.util.List<?
// attribute builder-list accessors and lazy RepeatedFieldBuilderV3 construction.
extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getAttributeOrBuilderList() { if (attributeBuilder_ != null) { return attributeBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(attribute_); } } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder() { return getAttributeFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder( int index) { return getAttributeFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 5;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder> getAttributeBuilderList() { return getAttributeFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getAttributeFieldBuilder() { if (attributeBuilder_ == null) { attributeBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( attribute_, ((bitField0_ &
// attribute builder wiring concludes; then optional enum durability (bit 0x20, default USE_DEFAULT)
// and optional message time_range (bit 0x40) backed by a SingleFieldBuilderV3.
0x00000010) == 0x00000010), getParentForChildren(), isClean()); attribute_ = null; } return attributeBuilder_; } private int durability_ = 0; /** * <code>optional .hbase.pb.MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code> */ public boolean hasDurability() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <code>optional .hbase.pb.MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Durability getDurability() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Durability result = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Durability.valueOf(durability_); return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Durability.USE_DEFAULT : result; } /** * <code>optional .hbase.pb.MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code> */ public Builder setDurability(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Durability value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000020; durability_ = value.getNumber(); onChanged(); return this; } /** * <code>optional .hbase.pb.MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code> */ public Builder clearDurability() { bitField0_ = (bitField0_ & ~0x00000020); durability_ = 0; onChanged(); return this; } private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange timeRange_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_; /** * <pre> * For some mutations, a result may be returned, in
which case, * time range can be specified for potential performance gain * </pre> * * <code>optional .hbase.pb.TimeRange time_range = 7;</code> */ public boolean hasTimeRange() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** * <pre> * For some mutations, a result may be returned, in which case, * time range can be specified for potential performance gain * </pre> * * <code>optional .hbase.pb.TimeRange time_range = 7;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { if (timeRangeBuilder_ == null) { return timeRange_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance() : timeRange_; } else { return timeRangeBuilder_.getMessage(); } } /** * <pre> * For some mutations, a result may be returned, in which case, * time range can be specified for potential performance gain * </pre> * * <code>optional .hbase.pb.TimeRange time_range = 7;</code> */ public Builder setTimeRange(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange value) { if (timeRangeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } timeRange_ = value; onChanged(); } else { timeRangeBuilder_.setMessage(value); } bitField0_ |= 0x00000040; return this; } /** * <pre> * For some mutations, a result may be returned, in which case, * time range can be specified for potential performance gain * </pre> * * <code>optional .hbase.pb.TimeRange time_range = 7;</code> */ public Builder setTimeRange( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) { if (timeRangeBuilder_ == null) { timeRange_ = builderForValue.build(); onChanged(); } else { timeRangeBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000040; return this; } /** * <pre> * For some mutations, a result may be returned, in which case, * time range can be specified for potential performance gain * </pre> * * <code>optional
.hbase.pb.TimeRange time_range = 7;</code> */ public Builder mergeTimeRange(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange value) { if (timeRangeBuilder_ == null) { if (((bitField0_ & 0x00000040) == 0x00000040) && timeRange_ != null && timeRange_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) { timeRange_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial(); } else { timeRange_ = value; } onChanged(); } else { timeRangeBuilder_.mergeFrom(value); } bitField0_ |= 0x00000040; return this; } /** * <pre> * For some mutations, a result may be returned, in which case, * time range can be specified for potential performance gain * </pre> * * <code>optional .hbase.pb.TimeRange time_range = 7;</code> */ public Builder clearTimeRange() { if (timeRangeBuilder_ == null) { timeRange_ = null; onChanged(); } else { timeRangeBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000040); return this; } /** * <pre> * For some mutations, a result may be returned, in which case, * time range can be specified for potential performance gain * </pre> * * <code>optional .hbase.pb.TimeRange time_range = 7;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() { bitField0_ |= 0x00000040; onChanged(); return getTimeRangeFieldBuilder().getBuilder(); } /** * <pre> * For some mutations, a result may be returned, in which case, * time range can be specified for potential performance gain * </pre> * * <code>optional .hbase.pb.TimeRange time_range = 7;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { if (timeRangeBuilder_ != null) { return timeRangeBuilder_.getMessageOrBuilder(); } else { return timeRange_ == null ?
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance() : timeRange_; } } /** * <pre> * For some mutations, a result may be returned, in which case, * time range can be specified for potential performance gain * </pre> * * <code>optional .hbase.pb.TimeRange time_range = 7;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> getTimeRangeFieldBuilder() { if (timeRangeBuilder_ == null) { timeRangeBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>( getTimeRange(), getParentForChildren(), isClean()); timeRange_ = null; } return timeRangeBuilder_; } private int associatedCellCount_ ; /** * <pre> * The below count is set when the associated cells are NOT * part of this protobuf message; they are passed alongside * and then this Message is a placeholder with metadata. The * count is needed to know how many to peel off the block of Cells as * ours. NOTE: This is different from the pb managed cell_count of the * 'cell' field above which is non-null when the cells are pb'd. * </pre> * * <code>optional int32 associated_cell_count = 8;</code> */ public boolean hasAssociatedCellCount() { return ((bitField0_ & 0x00000080) == 0x00000080); } /** * <pre> * The below count is set when the associated cells are NOT * part of this protobuf message; they are passed alongside * and then this Message is a placeholder with metadata. The * count is needed to know how many to peel off the block of Cells as * ours. 
NOTE: This is different from the pb managed cell_count of the * 'cell' field above which is non-null when the cells are pb'd. * </pre> * * <code>optional int32 associated_cell_count = 8;</code> */ public int getAssociatedCellCount() { return associatedCellCount_; } /** * <pre> * The below count is set when the associated cells are NOT * part of this protobuf message; they are passed alongside * and then this Message is a placeholder with metadata. The * count is needed to know how many to peel off the block of Cells as * ours. NOTE: This is different from the pb managed cell_count of the * 'cell' field above which is non-null when the cells are pb'd. * </pre> * * <code>optional int32 associated_cell_count = 8;</code> */ public Builder setAssociatedCellCount(int value) { bitField0_ |= 0x00000080; associatedCellCount_ = value; onChanged(); return this; } /** * <pre> * The below count is set when the associated cells are NOT * part of this protobuf message; they are passed alongside * and then this Message is a placeholder with metadata. The * count is needed to know how many to peel off the block of Cells as * ours. NOTE: This is different from the pb managed cell_count of the * 'cell' field above which is non-null when the cells are pb'd. 
* </pre> * * <code>optional int32 associated_cell_count = 8;</code> */ public Builder clearAssociatedCellCount() { bitField0_ = (bitField0_ & ~0x00000080); associatedCellCount_ = 0; onChanged(); return this; } private long nonce_ ; /** * <code>optional uint64 nonce = 9;</code> */ public boolean hasNonce() { return ((bitField0_ & 0x00000100) == 0x00000100); } /** * <code>optional uint64 nonce = 9;</code> */ public long getNonce() { return nonce_; } /** * <code>optional uint64 nonce = 9;</code> */ public Builder setNonce(long value) { bitField0_ |= 0x00000100; nonce_ = value; onChanged(); return this; } /** * <code>optional uint64 nonce = 9;</code> */ public Builder clearNonce() { bitField0_ = (bitField0_ & ~0x00000100); nonce_ = 0L; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.MutationProto) } // @@protoc_insertion_point(class_scope:hbase.pb.MutationProto) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<MutationProto> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<MutationProto>() { public MutationProto parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new MutationProto(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<MutationProto> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<MutationProto> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface MutateRequestOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.MutateRequest) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ boolean hasRegion(); /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); /** * <code>required .hbase.pb.MutationProto mutation = 2;</code> */ boolean hasMutation(); /** * <code>required .hbase.pb.MutationProto mutation = 2;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto getMutation(); /** * <code>required .hbase.pb.MutationProto mutation = 2;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder(); /** * <code>optional .hbase.pb.Condition condition = 3;</code> */ boolean hasCondition(); /** * <code>optional .hbase.pb.Condition condition = 3;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition getCondition(); /** * <code>optional .hbase.pb.Condition condition = 3;</code> */ 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder(); /** * <code>optional uint64 nonce_group = 4;</code> */ boolean hasNonceGroup(); /** * <code>optional uint64 nonce_group = 4;</code> */ long getNonceGroup(); } /** * <pre> ** * The mutate request. Perform a single Mutate operation. * Optionally, you can specify a condition. The mutate * will take place only if the condition is met. Otherwise, * the mutate will be ignored. In the response result, * parameter processed is used to indicate if the mutate * actually happened. * </pre> * * Protobuf type {@code hbase.pb.MutateRequest} */ public static final class MutateRequest extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.MutateRequest) MutateRequestOrBuilder { // Use MutateRequest.newBuilder() to construct. private MutateRequest(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private MutateRequest() { nonceGroup_ = 0L; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private MutateRequest( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = region_.toBuilder(); } region_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(region_); region_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 18: { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Builder subBuilder = null; if (((bitField0_ & 0x00000002) == 0x00000002)) { subBuilder = mutation_.toBuilder(); } mutation_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(mutation_); mutation_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } case 26: { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.Builder subBuilder = null; if (((bitField0_ & 0x00000004) == 0x00000004)) { subBuilder = condition_.toBuilder(); } condition_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(condition_); condition_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000004; break; } case 32: { bitField0_ |= 0x00000008; nonceGroup_ = input.readUInt64(); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutateRequest_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutateRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest.Builder.class); } private int bitField0_; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_; /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { return region_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } public static final int MUTATION_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto mutation_; /** * <code>required .hbase.pb.MutationProto mutation = 2;</code> */ public boolean hasMutation() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required .hbase.pb.MutationProto mutation = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto getMutation() { return mutation_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance() : mutation_; } /** * <code>required .hbase.pb.MutationProto mutation = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder() { return mutation_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance() : mutation_; } public static final int CONDITION_FIELD_NUMBER = 3; private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition condition_; /** * <code>optional .hbase.pb.Condition condition = 3;</code> */ public boolean hasCondition() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional .hbase.pb.Condition condition = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition getCondition() { return condition_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.getDefaultInstance() : condition_; } /** * <code>optional .hbase.pb.Condition condition = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder() { return condition_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.getDefaultInstance() : condition_; } public static final int NONCE_GROUP_FIELD_NUMBER = 4; private long nonceGroup_; /** * <code>optional uint64 nonce_group = 4;</code> */ public boolean hasNonceGroup() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional uint64 nonce_group = 4;</code> */ public long getNonceGroup() { return nonceGroup_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasRegion()) { memoizedIsInitialized = 0; return false; } if (!hasMutation()) { memoizedIsInitialized = 0; return false; } if (!getRegion().isInitialized()) { memoizedIsInitialized = 0; return false; } if (!getMutation().isInitialized()) { memoizedIsInitialized = 0; return false; } if (hasCondition()) { if (!getCondition().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, getRegion()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeMessage(2, getMutation()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeMessage(3, getCondition()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeUInt64(4, nonceGroup_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(1, getRegion()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(2, 
getMutation()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(3, getCondition()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt64Size(4, nonceGroup_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest) obj; boolean result = true; result = result && (hasRegion() == other.hasRegion()); if (hasRegion()) { result = result && getRegion() .equals(other.getRegion()); } result = result && (hasMutation() == other.hasMutation()); if (hasMutation()) { result = result && getMutation() .equals(other.getMutation()); } result = result && (hasCondition() == other.hasCondition()); if (hasCondition()) { result = result && getCondition() .equals(other.getCondition()); } result = result && (hasNonceGroup() == other.hasNonceGroup()); if (hasNonceGroup()) { result = result && (getNonceGroup() == other.getNonceGroup()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasRegion()) { hash = (37 * hash) + REGION_FIELD_NUMBER; hash = (53 * hash) + getRegion().hashCode(); } if (hasMutation()) { hash = (37 * hash) + MUTATION_FIELD_NUMBER; hash = (53 * hash) + getMutation().hashCode(); } if (hasCondition()) { hash = (37 * hash) + 
CONDITION_FIELD_NUMBER; hash = (53 * hash) + getCondition().hashCode(); } if (hasNonceGroup()) { hash = (37 * hash) + NONCE_GROUP_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( getNonceGroup()); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest 
parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest 
prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> ** * The mutate request. Perform a single Mutate operation. * Optionally, you can specify a condition. The mutate * will take place only if the condition is met. Otherwise, * the mutate will be ignored. In the response result, * parameter processed is used to indicate if the mutate * actually happened. * </pre> * * Protobuf type {@code hbase.pb.MutateRequest} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.MutateRequest) org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequestOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutateRequest_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutateRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( 
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getRegionFieldBuilder(); getMutationFieldBuilder(); getConditionFieldBuilder(); } } public Builder clear() { super.clear(); if (regionBuilder_ == null) { region_ = null; } else { regionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (mutationBuilder_ == null) { mutation_ = null; } else { mutationBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); if (conditionBuilder_ == null) { condition_ = null; } else { conditionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000004); nonceGroup_ = 0L; bitField0_ = (bitField0_ & ~0x00000008); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutateRequest_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 
// ---- MutateRequest.Builder (continued): tail of buildPartial() ----
// Copies each field that is set on the builder into the result message and
// translates the builder's local bits into the message's has-bits.
0x00000001)) {
        to_bitField0_ |= 0x00000001;
      }
      // For each message field, prefer the nested builder's built value when
      // a field builder has been materialized; otherwise use the raw field.
      if (regionBuilder_ == null) {
        result.region_ = region_;
      } else {
        result.region_ = regionBuilder_.build();
      }
      if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
        to_bitField0_ |= 0x00000002;
      }
      if (mutationBuilder_ == null) {
        result.mutation_ = mutation_;
      } else {
        result.mutation_ = mutationBuilder_.build();
      }
      if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
        to_bitField0_ |= 0x00000004;
      }
      if (conditionBuilder_ == null) {
        result.condition_ = condition_;
      } else {
        result.condition_ = conditionBuilder_.build();
      }
      if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
        to_bitField0_ |= 0x00000008;
      }
      result.nonceGroup_ = nonceGroup_;
      result.bitField0_ = to_bitField0_;
      onBuilt();
      return result;
    }

    // Standard GeneratedMessageV3.Builder overrides, narrowed to return this
    // concrete Builder type so reflective mutations can be chained.
    public Builder clone() {
      return (Builder) super.clone();
    }
    public Builder setField(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
        Object value) {
      return (Builder) super.setField(field, value);
    }
    public Builder clearField(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
      return (Builder) super.clearField(field);
    }
    public Builder clearOneof(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return (Builder) super.clearOneof(oneof);
    }
    public Builder setRepeatedField(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, Object value) {
      return (Builder) super.setRepeatedField(field, index, value);
    }
    public Builder addRepeatedField(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
        Object value) {
      return (Builder) super.addRepeatedField(field, value);
    }
    // Generic merge entry point: dispatches to the typed overload when the
    // other message is a MutateRequest, else falls back to field-by-field
    // reflective merging in the superclass.
    public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
      if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest) {
        return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Typed merge: copies only the fields that are set on `other`; merging
    // the default instance is a no-op.
    public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest other) {
      if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest.getDefaultInstance()) return this;
      if (other.hasRegion()) {
        mergeRegion(other.getRegion());
      }
      if (other.hasMutation()) {
        mergeMutation(other.getMutation());
      }
      if (other.hasCondition()) {
        mergeCondition(other.getCondition());
      }
      if (other.hasNonceGroup()) {
        setNonceGroup(other.getNonceGroup());
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    // proto2 `required` semantics: region and mutation must be present and
    // themselves initialized; condition is optional but, when set, must be
    // initialized too.
    public final boolean isInitialized() {
      if (!hasRegion()) {
        return false;
      }
      if (!hasMutation()) {
        return false;
      }
      if (!getRegion().isInitialized()) {
        return false;
      }
      if (!getMutation().isInitialized()) {
        return false;
      }
      if (hasCondition()) {
        if (!getCondition().isInitialized()) {
          return false;
        }
      }
      return true;
    }

    // Parses from the wire and merges the result into this builder. Even on
    // a parse failure, whatever was successfully read (the "unfinished"
    // message) is merged in the finally block before the IOException is
    // rethrown.
    public Builder mergeFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    // Builder-local has-bits: 0x1 region, 0x2 mutation, 0x4 condition,
    // 0x8 nonce_group.
    private int bitField0_;

    private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = null;
    private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
        org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier,
        org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder,
        org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
    // ---- Accessors for `required .hbase.pb.RegionSpecifier region = 1` ----
    // All accessors follow the standard protoc single-field pattern: until a
    // field builder is created, the raw region_ field is authoritative; once
    // regionBuilder_ exists, it owns the value and region_ is cleared.
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    public boolean hasRegion() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
      if (regionBuilder_ == null) {
        // null means "unset"; surface the default instance rather than null.
        return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_;
      } else {
        return regionBuilder_.getMessage();
      }
    }
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    public Builder setRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) {
      if (regionBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        region_ = value;
        onChanged();
      } else {
        regionBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      return this;
    }
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    public Builder setRegion(
        org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
      if (regionBuilder_ == null) {
        region_ = builderForValue.build();
        onChanged();
      } else {
        regionBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      return this;
    }
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    public Builder mergeRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) {
      if (regionBuilder_ == null) {
        // If a non-default value is already set, merge field-by-field;
        // otherwise simply adopt the incoming value.
        if (((bitField0_ & 0x00000001) == 0x00000001) &&
            region_ != null &&
            region_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
          region_ =
            org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
        } else {
          region_ = value;
        }
        onChanged();
      } else {
        regionBuilder_.mergeFrom(value);
      }
      bitField0_ |= 0x00000001;
      return this;
    }
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    public Builder clearRegion() {
      if (regionBuilder_ == null) {
        region_ = null;
        onChanged();
      } else {
        regionBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
      // Handing out a mutable sub-builder implicitly marks the field as set.
      bitField0_ |= 0x00000001;
      onChanged();
      return getRegionFieldBuilder().getBuilder();
    }
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
      if (regionBuilder_ != null) {
        return regionBuilder_.getMessageOrBuilder();
      } else {
        return region_ == null ?
            org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_;
      }
    }
    /**
     * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
     */
    // Lazily creates the field builder; after creation the builder owns the
    // value and the plain region_ reference is released.
    private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
        org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
        getRegionFieldBuilder() {
      if (regionBuilder_ == null) {
        regionBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
            org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
                getRegion(),
                getParentForChildren(),
                isClean());
        region_ = null;
      }
      return regionBuilder_;
    }

    // ---- Accessors for `required .hbase.pb.MutationProto mutation = 2` ----
    // Same lazy field-builder pattern as `region` above.
    private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto mutation_ = null;
    private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
        org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProtoOrBuilder> mutationBuilder_;
    /**
     * <code>required .hbase.pb.MutationProto mutation = 2;</code>
     */
    public boolean hasMutation() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>required .hbase.pb.MutationProto mutation = 2;</code>
     */
    public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto getMutation() {
      if (mutationBuilder_ == null) {
        return mutation_ == null ?
            org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance() : mutation_;
      } else {
        return mutationBuilder_.getMessage();
      }
    }
    /**
     * <code>required .hbase.pb.MutationProto mutation = 2;</code>
     */
    public Builder setMutation(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto value) {
      if (mutationBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        mutation_ = value;
        onChanged();
      } else {
        mutationBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      return this;
    }
    /**
     * <code>required .hbase.pb.MutationProto mutation = 2;</code>
     */
    public Builder setMutation(
        org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Builder builderForValue) {
      if (mutationBuilder_ == null) {
        mutation_ = builderForValue.build();
        onChanged();
      } else {
        mutationBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      return this;
    }
    /**
     * <code>required .hbase.pb.MutationProto mutation = 2;</code>
     */
    public Builder mergeMutation(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto value) {
      if (mutationBuilder_ == null) {
        // Merge into an existing non-default value, else adopt the new one.
        if (((bitField0_ & 0x00000002) == 0x00000002) &&
            mutation_ != null &&
            mutation_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance()) {
          mutation_ =
            org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.newBuilder(mutation_).mergeFrom(value).buildPartial();
        } else {
          mutation_ = value;
        }
        onChanged();
      } else {
        mutationBuilder_.mergeFrom(value);
      }
      bitField0_ |= 0x00000002;
      return this;
    }
    /**
     * <code>required .hbase.pb.MutationProto mutation = 2;</code>
     */
    public Builder clearMutation() {
      if (mutationBuilder_ == null) {
        mutation_ = null;
        onChanged();
      } else {
        mutationBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000002);
      return this;
    }
    /**
     * <code>required .hbase.pb.MutationProto mutation = 2;</code>
     */
    public
        org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Builder getMutationBuilder() {
      // Handing out a mutable sub-builder implicitly marks the field as set.
      bitField0_ |= 0x00000002;
      onChanged();
      return getMutationFieldBuilder().getBuilder();
    }
    /**
     * <code>required .hbase.pb.MutationProto mutation = 2;</code>
     */
    public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder() {
      if (mutationBuilder_ != null) {
        return mutationBuilder_.getMessageOrBuilder();
      } else {
        return mutation_ == null ?
            org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance() : mutation_;
      }
    }
    /**
     * <code>required .hbase.pb.MutationProto mutation = 2;</code>
     */
    // Lazily creates the field builder; afterwards it owns the value.
    private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
        org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProtoOrBuilder>
        getMutationFieldBuilder() {
      if (mutationBuilder_ == null) {
        mutationBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
            org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProtoOrBuilder>(
                getMutation(),
                getParentForChildren(),
                isClean());
        mutation_ = null;
      }
      return mutationBuilder_;
    }

    // ---- Accessors for `optional .hbase.pb.Condition condition = 3` ----
    // Same lazy field-builder pattern; this field is optional.
    private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition condition_ = null;
    private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
        org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ConditionOrBuilder> conditionBuilder_;
    /**
     * <code>optional .hbase.pb.Condition
 condition = 3;</code>
     */
    public boolean hasCondition() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional .hbase.pb.Condition condition = 3;</code>
     */
    public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition getCondition() {
      if (conditionBuilder_ == null) {
        // null means "unset"; surface the default instance rather than null.
        return condition_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.getDefaultInstance() : condition_;
      } else {
        return conditionBuilder_.getMessage();
      }
    }
    /**
     * <code>optional .hbase.pb.Condition condition = 3;</code>
     */
    public Builder setCondition(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition value) {
      if (conditionBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        condition_ = value;
        onChanged();
      } else {
        conditionBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000004;
      return this;
    }
    /**
     * <code>optional .hbase.pb.Condition condition = 3;</code>
     */
    public Builder setCondition(
        org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.Builder builderForValue) {
      if (conditionBuilder_ == null) {
        condition_ = builderForValue.build();
        onChanged();
      } else {
        conditionBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000004;
      return this;
    }
    /**
     * <code>optional .hbase.pb.Condition condition = 3;</code>
     */
    public Builder mergeCondition(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition value) {
      if (conditionBuilder_ == null) {
        // Merge into an existing non-default value, else adopt the new one.
        if (((bitField0_ & 0x00000004) == 0x00000004) &&
            condition_ != null &&
            condition_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.getDefaultInstance()) {
          condition_ =
            org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.newBuilder(condition_).mergeFrom(value).buildPartial();
        } else {
          condition_ = value;
        }
        onChanged();
      } else {
        conditionBuilder_.mergeFrom(value);
      }
      bitField0_ |= 0x00000004;
      return this;
    }
    /**
     * <code>optional .hbase.pb.Condition condition = 3;</code>
     */
    public Builder clearCondition() {
      if (conditionBuilder_ == null) {
        condition_ = null;
        onChanged();
      } else {
        conditionBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000004);
      return this;
    }
    /**
     * <code>optional .hbase.pb.Condition condition = 3;</code>
     */
    public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.Builder getConditionBuilder() {
      // Handing out a mutable sub-builder implicitly marks the field as set.
      bitField0_ |= 0x00000004;
      onChanged();
      return getConditionFieldBuilder().getBuilder();
    }
    /**
     * <code>optional .hbase.pb.Condition condition = 3;</code>
     */
    public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder() {
      if (conditionBuilder_ != null) {
        return conditionBuilder_.getMessageOrBuilder();
      } else {
        return condition_ == null ?
            org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.getDefaultInstance() : condition_;
      }
    }
    /**
     * <code>optional .hbase.pb.Condition condition = 3;</code>
     */
    // Lazily creates the field builder; afterwards it owns the value.
    private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
        org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ConditionOrBuilder>
        getConditionFieldBuilder() {
      if (conditionBuilder_ == null) {
        conditionBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
            org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ConditionOrBuilder>(
                getCondition(),
                getParentForChildren(),
                isClean());
        condition_ = null;
      }
      return conditionBuilder_;
    }

    // ---- Accessors for `optional uint64 nonce_group = 4` (scalar) ----
    private long nonceGroup_ ;
    /**
     * <code>optional uint64 nonce_group = 4;</code>
     */
    public boolean hasNonceGroup() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional uint64 nonce_group = 4;</code>
     */
    public long
 getNonceGroup() {
      return nonceGroup_;
    }
    /**
     * <code>optional uint64 nonce_group = 4;</code>
     */
    public Builder setNonceGroup(long value) {
      bitField0_ |= 0x00000008;
      nonceGroup_ = value;
      onChanged();
      return this;
    }
    /**
     * <code>optional uint64 nonce_group = 4;</code>
     */
    public Builder clearNonceGroup() {
      bitField0_ = (bitField0_ & ~0x00000008);
      nonceGroup_ = 0L;
      onChanged();
      return this;
    }
    // Unknown-field plumbing delegated to the generated superclass.
    public final Builder setUnknownFields(
        final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    public final Builder mergeUnknownFields(
        final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:hbase.pb.MutateRequest)
  }

  // @@protoc_insertion_point(class_scope:hbase.pb.MutateRequest)
  // Singleton default instance shared by all callers; initialized eagerly.
  private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest();
  }

  public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Deprecated as a public field (use parser()); kept for compatibility.
  @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<MutateRequest>
      PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<MutateRequest>() {
    public MutateRequest parsePartialFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
        // Parsing happens entirely in the private wire-format constructor.
        return new MutateRequest(input, extensionRegistry);
    }
  };

  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<MutateRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public
 org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<MutateRequest> getParserForType() {
    return PARSER;
  }

  public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

  }

  // Read-only view of a MutateResponse: shared by the message and its Builder.
  public interface MutateResponseOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hbase.pb.MutateResponse)
      org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hbase.pb.Result result = 1;</code>
     */
    boolean hasResult();
    /**
     * <code>optional .hbase.pb.Result result = 1;</code>
     */
    org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result getResult();
    /**
     * <code>optional .hbase.pb.Result result = 1;</code>
     */
    org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder();

    /**
     * <pre>
     * used for mutate to indicate processed only
     * </pre>
     *
     * <code>optional bool processed = 2;</code>
     */
    boolean hasProcessed();
    /**
     * <pre>
     * used for mutate to indicate processed only
     * </pre>
     *
     * <code>optional bool processed = 2;</code>
     */
    boolean getProcessed();
  }
  /**
   * Protobuf type {@code hbase.pb.MutateResponse}
   */
  public  static final class MutateResponse extends
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hbase.pb.MutateResponse)
      MutateResponseOrBuilder {
    // Use MutateResponse.newBuilder() to construct.
private MutateResponse(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private MutateResponse() { processed_ = false; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private MutateResponse( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = result_.toBuilder(); } result_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(result_); result_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 16: { bitField0_ |= 0x00000002; processed_ = input.readBool(); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final 
 org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutateResponse_descriptor;
    }

    protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutateResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse.Builder.class);
    }

    // Has-bits: 0x1 result, 0x2 processed.
    private int bitField0_;
    public static final int RESULT_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result result_;
    /**
     * <code>optional .hbase.pb.Result result = 1;</code>
     */
    public boolean hasResult() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional .hbase.pb.Result result = 1;</code>
     */
    public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result getResult() {
      // Never returns null: an unset field reads as the default instance.
      return result_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance() : result_;
    }
    /**
     * <code>optional .hbase.pb.Result result = 1;</code>
     */
    public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() {
      return result_ == null ?
 org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance() : result_;
    }

    public static final int PROCESSED_FIELD_NUMBER = 2;
    private boolean processed_;
    /**
     * <pre>
     * used for mutate to indicate processed only
     * </pre>
     *
     * <code>optional bool processed = 2;</code>
     */
    public boolean hasProcessed() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <pre>
     * used for mutate to indicate processed only
     * </pre>
     *
     * <code>optional bool processed = 2;</code>
     */
    public boolean getProcessed() {
      return processed_;
    }

    // Memoized initialization check: -1 unknown, 0 false, 1 true. All fields
    // are optional, so this message is always initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    // Serializes only the fields whose has-bits are set, then any unknown
    // fields carried through from parsing.
    public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, getResult());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBool(2, processed_);
      }
      unknownFields.writeTo(output);
    }

    // Size is computed once and memoized (-1 means "not yet computed").
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, getResult());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
          .computeBoolSize(2, processed_);
      }
      size += unknownFields.getSerializedSize();
      memoizedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    // Value equality: same fields set, equal values, equal unknown fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse other =
 (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse) obj;

      boolean result = true;
      result = result && (hasResult() == other.hasResult());
      if (hasResult()) {
        result = result && getResult()
            .equals(other.getResult());
      }
      result = result && (hasProcessed() == other.hasProcessed());
      if (hasProcessed()) {
        result = result && (getProcessed()
            == other.getProcessed());
      }
      result = result && unknownFields.equals(other.unknownFields);
      return result;
    }

    // Hash is memoized (0 means "not yet computed") and folds in only the
    // fields that are present, mirroring equals().
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasResult()) {
        hash = (37 * hash) + RESULT_FIELD_NUMBER;
        hash = (53 * hash) + getResult().hashCode();
      }
      if (hasProcessed()) {
        hash = (37 * hash) + PROCESSED_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean(
            getProcessed());
      }
      hash = (29 * hash) + unknownFields.hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // ---- Static parseFrom overloads for every common input shape ----
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse parseFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse parseFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse parseFrom(byte[] data)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static
 org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse parseFrom(
        byte[] data,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message.
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse parseFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse parseFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    // ---- Builder factory methods ----
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() {
      // The default instance yields a fresh builder; otherwise seed the
      // builder with this message's fields.
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.MutateResponse}
     */
    public static final class Builder extends
        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hbase.pb.MutateResponse)
        org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponseOrBuilder {
      public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutateResponse_descriptor;
      }

      protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutateResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates nested field builders when running reflectively
      // (alwaysUseFieldBuilders is set in tests / descriptor-driven use).
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getResultFieldBuilder();
        }
      }
      // Resets every field to its default and clears all has-bits.
      public Builder clear() {
        super.clear();
        if (resultBuilder_ == null) {
          result_ = null;
        } else {
          resultBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        processed_ = false;
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MutateResponse_descriptor;
      }

      public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance();
      }

      // build() enforces initialization; buildPartial() does not.
      public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse build() {
        org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse buildPartial() {
        org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse(this);
        int
 from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        // Translate builder bits into message has-bits and copy fields.
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (resultBuilder_ == null) {
          result.result_ = result_;
        } else {
          result.result_ = resultBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.processed_ = processed_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      // Standard GeneratedMessageV3.Builder overrides, narrowed to return
      // this Builder type for chaining.
      public Builder clone() {
        return (Builder) super.clone();
      }
      public Builder setField(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
          Object value) {
        return (Builder) super.setField(field, value);
      }
      public Builder clearField(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
        return (Builder) super.clearField(field);
      }
      public Builder clearOneof(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
        return (Builder) super.clearOneof(oneof);
      }
      public Builder setRepeatedField(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
          int index, Object value) {
        return (Builder) super.setRepeatedField(field, index, value);
      }
      public Builder addRepeatedField(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
          Object value) {
        return (Builder) super.addRepeatedField(field, value);
      }
      // Generic merge entry point: dispatches to the typed overload when
      // possible, else merges reflectively via the superclass.
      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse) {
          return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Typed merge: copies only the fields set on `other`; merging the
      // default instance is a no-op.
      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse other) {
        if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance()) return this;
        if (other.hasResult()) {
          mergeResult(other.getResult());
        }
        if (other.hasProcessed()) {
          setProcessed(other.getProcessed());
        }
        this.mergeUnknownFields(other.unknownFields);
        onChanged();
        return this;
      }

      // All fields are optional, so a MutateResponse is always initialized.
      public final boolean isInitialized() {
        return true;
      }

      // Parses from the wire and merges into this builder; any partially
      // parsed message is merged before the IOException propagates.
      public Builder mergeFrom(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
          org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse) e.getUnfinishedMessage();
          throw e.unwrapIOException();
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Builder-local has-bits: 0x1 result, 0x2 processed.
      private int bitField0_;

      // ---- Accessors for `optional .hbase.pb.Result result = 1` ----
      // Standard lazy field-builder pattern: raw result_ is authoritative
      // until resultBuilder_ is created, after which the builder owns it.
      private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result result_ = null;
      private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder> resultBuilder_;
      /**
       * <code>optional .hbase.pb.Result result = 1;</code>
       */
      public boolean hasResult() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional .hbase.pb.Result result = 1;</code>
       */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result getResult() {
        if (resultBuilder_ == null) {
          return result_ == null ?
 org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance() : result_;
        } else {
          return resultBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hbase.pb.Result result = 1;</code>
       */
      public Builder setResult(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result value) {
        if (resultBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          result_ = value;
          onChanged();
        } else {
          resultBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>optional .hbase.pb.Result result = 1;</code>
       */
      public Builder setResult(
          org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder builderForValue) {
        if (resultBuilder_ == null) {
          result_ = builderForValue.build();
          onChanged();
        } else {
          resultBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>optional .hbase.pb.Result result = 1;</code>
       */
      public Builder mergeResult(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result value) {
        if (resultBuilder_ == null) {
          // Merge into an existing non-default value, else adopt the new one.
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              result_ != null &&
              result_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance()) {
            result_ =
              org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.newBuilder(result_).mergeFrom(value).buildPartial();
          } else {
            result_ = value;
          }
          onChanged();
        } else {
          resultBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>optional .hbase.pb.Result result = 1;</code>
       */
      public Builder clearResult() {
        if (resultBuilder_ == null) {
          result_ = null;
          onChanged();
        } else {
          resultBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>optional .hbase.pb.Result result = 1;</code>
       */
      public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder getResultBuilder() {
        // Handing out a mutable sub-builder implicitly marks the field set.
        bitField0_ |= 0x00000001;
        onChanged();
        return
getResultFieldBuilder().getBuilder(); } /** * <code>optional .hbase.pb.Result result = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() { if (resultBuilder_ != null) { return resultBuilder_.getMessageOrBuilder(); } else { return result_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance() : result_; } } /** * <code>optional .hbase.pb.Result result = 1;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder> getResultFieldBuilder() { if (resultBuilder_ == null) { resultBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder>( getResult(), getParentForChildren(), isClean()); result_ = null; } return resultBuilder_; } private boolean processed_ ; /** * <pre> * used for mutate to indicate processed only * </pre> * * <code>optional bool processed = 2;</code> */ public boolean hasProcessed() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <pre> * used for mutate to indicate processed only * </pre> * * <code>optional bool processed = 2;</code> */ public boolean getProcessed() { return processed_; } /** * <pre> * used for mutate to indicate processed only * </pre> * * <code>optional bool processed = 2;</code> */ public Builder setProcessed(boolean value) { bitField0_ |= 0x00000002; processed_ = value; onChanged(); return this; } /** * <pre> * used for mutate to indicate processed only * </pre> * * <code>optional bool processed = 
2;</code> */ public Builder clearProcessed() { bitField0_ = (bitField0_ & ~0x00000002); processed_ = false; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.MutateResponse) } // @@protoc_insertion_point(class_scope:hbase.pb.MutateResponse) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<MutateResponse> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<MutateResponse>() { public MutateResponse parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new MutateResponse(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<MutateResponse> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<MutateResponse> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface 
ScanOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.Scan) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>repeated .hbase.pb.Column column = 1;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column> getColumnList(); /** * <code>repeated .hbase.pb.Column column = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column getColumn(int index); /** * <code>repeated .hbase.pb.Column column = 1;</code> */ int getColumnCount(); /** * <code>repeated .hbase.pb.Column column = 1;</code> */ java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ColumnOrBuilder> getColumnOrBuilderList(); /** * <code>repeated .hbase.pb.Column column = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder( int index); /** * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList(); /** * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index); /** * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code> */ int getAttributeCount(); /** * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code> */ java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getAttributeOrBuilderList(); /** * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( int index); /** * <code>optional bytes start_row = 3;</code> */ boolean hasStartRow(); /** * <code>optional bytes start_row = 3;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getStartRow(); /** * <code>optional bytes stop_row = 4;</code> */ boolean hasStopRow(); /** * <code>optional bytes stop_row = 4;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getStopRow(); /** * <code>optional .hbase.pb.Filter filter = 5;</code> */ boolean hasFilter(); /** * <code>optional .hbase.pb.Filter filter = 5;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter getFilter(); /** * <code>optional .hbase.pb.Filter filter = 5;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder(); /** * <code>optional .hbase.pb.TimeRange time_range = 6;</code> */ boolean hasTimeRange(); /** * <code>optional .hbase.pb.TimeRange time_range = 6;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange getTimeRange(); /** * <code>optional .hbase.pb.TimeRange time_range = 6;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder(); /** * <code>optional uint32 max_versions = 7 [default = 1];</code> */ boolean hasMaxVersions(); /** * <code>optional uint32 max_versions = 7 [default = 1];</code> */ int getMaxVersions(); /** * <code>optional bool cache_blocks = 8 [default = true];</code> */ boolean hasCacheBlocks(); /** * <code>optional bool cache_blocks = 8 [default = true];</code> */ boolean getCacheBlocks(); /** * <code>optional uint32 batch_size = 9;</code> */ boolean hasBatchSize(); /** * 
<code>optional uint32 batch_size = 9;</code> */ int getBatchSize(); /** * <code>optional uint64 max_result_size = 10;</code> */ boolean hasMaxResultSize(); /** * <code>optional uint64 max_result_size = 10;</code> */ long getMaxResultSize(); /** * <code>optional uint32 store_limit = 11;</code> */ boolean hasStoreLimit(); /** * <code>optional uint32 store_limit = 11;</code> */ int getStoreLimit(); /** * <code>optional uint32 store_offset = 12;</code> */ boolean hasStoreOffset(); /** * <code>optional uint32 store_offset = 12;</code> */ int getStoreOffset(); /** * <pre> * DO NOT add defaults to load_column_families_on_demand. * </pre> * * <code>optional bool load_column_families_on_demand = 13;</code> */ boolean hasLoadColumnFamiliesOnDemand(); /** * <pre> * DO NOT add defaults to load_column_families_on_demand. * </pre> * * <code>optional bool load_column_families_on_demand = 13;</code> */ boolean getLoadColumnFamiliesOnDemand(); /** * <code>optional bool small = 14 [deprecated = true];</code> */ @java.lang.Deprecated boolean hasSmall(); /** * <code>optional bool small = 14 [deprecated = true];</code> */ @java.lang.Deprecated boolean getSmall(); /** * <code>optional bool reversed = 15 [default = false];</code> */ boolean hasReversed(); /** * <code>optional bool reversed = 15 [default = false];</code> */ boolean getReversed(); /** * <code>optional .hbase.pb.Consistency consistency = 16 [default = STRONG];</code> */ boolean hasConsistency(); /** * <code>optional .hbase.pb.Consistency consistency = 16 [default = STRONG];</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency getConsistency(); /** * <code>optional uint32 caching = 17;</code> */ boolean hasCaching(); /** * <code>optional uint32 caching = 17;</code> */ int getCaching(); /** * <code>optional bool allow_partial_results = 18;</code> */ boolean hasAllowPartialResults(); /** * <code>optional bool allow_partial_results = 18;</code> */ boolean getAllowPartialResults(); /** * 
<code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> getCfTimeRangeList(); /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange getCfTimeRange(int index); /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code> */ int getCfTimeRangeCount(); /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code> */ java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder> getCfTimeRangeOrBuilderList(); /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder getCfTimeRangeOrBuilder( int index); /** * <code>optional uint64 mvcc_read_point = 20 [default = 0];</code> */ boolean hasMvccReadPoint(); /** * <code>optional uint64 mvcc_read_point = 20 [default = 0];</code> */ long getMvccReadPoint(); /** * <code>optional bool include_start_row = 21 [default = true];</code> */ boolean hasIncludeStartRow(); /** * <code>optional bool include_start_row = 21 [default = true];</code> */ boolean getIncludeStartRow(); /** * <code>optional bool include_stop_row = 22 [default = false];</code> */ boolean hasIncludeStopRow(); /** * <code>optional bool include_stop_row = 22 [default = false];</code> */ boolean getIncludeStopRow(); /** * <code>optional .hbase.pb.Scan.ReadType readType = 23 [default = DEFAULT];</code> */ boolean hasReadType(); /** * <code>optional .hbase.pb.Scan.ReadType readType = 23 [default = DEFAULT];</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.ReadType getReadType(); } /** * <pre> ** * Instead of get from a table, you can scan it with optional filters. 
* You can specify the row key range, time range, the columns/families * to scan and so on. * This scan is used the first time in a scan request. The response of * the initial scan will return a scanner id, which should be used to * fetch result batches later on before it is closed. * </pre> * * Protobuf type {@code hbase.pb.Scan} */ public static final class Scan extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.Scan) ScanOrBuilder { // Use Scan.newBuilder() to construct. private Scan(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private Scan() { column_ = java.util.Collections.emptyList(); attribute_ = java.util.Collections.emptyList(); startRow_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; stopRow_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; maxVersions_ = 1; cacheBlocks_ = true; batchSize_ = 0; maxResultSize_ = 0L; storeLimit_ = 0; storeOffset_ = 0; loadColumnFamiliesOnDemand_ = false; small_ = false; reversed_ = false; consistency_ = 0; caching_ = 0; allowPartialResults_ = false; cfTimeRange_ = java.util.Collections.emptyList(); mvccReadPoint_ = 0L; includeStartRow_ = true; includeStopRow_ = false; readType_ = 0; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private Scan( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = 
false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { column_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column>(); mutable_bitField0_ |= 0x00000001; } column_.add( input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.PARSER, extensionRegistry)); break; } case 18: { if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { attribute_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair>(); mutable_bitField0_ |= 0x00000002; } attribute_.add( input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry)); break; } case 26: { bitField0_ |= 0x00000001; startRow_ = input.readBytes(); break; } case 34: { bitField0_ |= 0x00000002; stopRow_ = input.readBytes(); break; } case 42: { org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.Builder subBuilder = null; if (((bitField0_ & 0x00000004) == 0x00000004)) { subBuilder = filter_.toBuilder(); } filter_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(filter_); filter_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000004; break; } case 50: { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = null; if (((bitField0_ & 0x00000008) == 0x00000008)) { subBuilder = timeRange_.toBuilder(); } timeRange_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(timeRange_); timeRange_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000008; break; } case 56: 
{ bitField0_ |= 0x00000010; maxVersions_ = input.readUInt32(); break; } case 64: { bitField0_ |= 0x00000020; cacheBlocks_ = input.readBool(); break; } case 72: { bitField0_ |= 0x00000040; batchSize_ = input.readUInt32(); break; } case 80: { bitField0_ |= 0x00000080; maxResultSize_ = input.readUInt64(); break; } case 88: { bitField0_ |= 0x00000100; storeLimit_ = input.readUInt32(); break; } case 96: { bitField0_ |= 0x00000200; storeOffset_ = input.readUInt32(); break; } case 104: { bitField0_ |= 0x00000400; loadColumnFamiliesOnDemand_ = input.readBool(); break; } case 112: { bitField0_ |= 0x00000800; small_ = input.readBool(); break; } case 120: { bitField0_ |= 0x00001000; reversed_ = input.readBool(); break; } case 128: { int rawValue = input.readEnum(); org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency value = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(16, rawValue); } else { bitField0_ |= 0x00002000; consistency_ = rawValue; } break; } case 136: { bitField0_ |= 0x00004000; caching_ = input.readUInt32(); break; } case 144: { bitField0_ |= 0x00008000; allowPartialResults_ = input.readBool(); break; } case 154: { if (!((mutable_bitField0_ & 0x00040000) == 0x00040000)) { cfTimeRange_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange>(); mutable_bitField0_ |= 0x00040000; } cfTimeRange_.add( input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.PARSER, extensionRegistry)); break; } case 160: { bitField0_ |= 0x00010000; mvccReadPoint_ = input.readUInt64(); break; } case 168: { bitField0_ |= 0x00020000; includeStartRow_ = input.readBool(); break; } case 176: { bitField0_ |= 0x00040000; includeStopRow_ = input.readBool(); break; } case 184: { int rawValue = input.readEnum(); 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.ReadType value = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.ReadType.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(23, rawValue); } else { bitField0_ |= 0x00080000; readType_ = rawValue; } break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { column_ = java.util.Collections.unmodifiableList(column_); } if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { attribute_ = java.util.Collections.unmodifiableList(attribute_); } if (((mutable_bitField0_ & 0x00040000) == 0x00040000)) { cfTimeRange_ = java.util.Collections.unmodifiableList(cfTimeRange_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Scan_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Scan_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.Builder.class); } /** * Protobuf enum {@code hbase.pb.Scan.ReadType} */ public enum ReadType implements org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolMessageEnum { /** * <code>DEFAULT = 0;</code> */ DEFAULT(0), /** * <code>STREAM = 1;</code> */ STREAM(1), /** * 
<code>PREAD = 2;</code> */ PREAD(2), ; /** * <code>DEFAULT = 0;</code> */ public static final int DEFAULT_VALUE = 0; /** * <code>STREAM = 1;</code> */ public static final int STREAM_VALUE = 1; /** * <code>PREAD = 2;</code> */ public static final int PREAD_VALUE = 2; public final int getNumber() { return value; } /** * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static ReadType valueOf(int value) { return forNumber(value); } public static ReadType forNumber(int value) { switch (value) { case 0: return DEFAULT; case 1: return STREAM; case 2: return PREAD; default: return null; } } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<ReadType> internalGetValueMap() { return internalValueMap; } private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap< ReadType> internalValueMap = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<ReadType>() { public ReadType findValueByNumber(int number) { return ReadType.forNumber(number); } }; public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(ordinal()); } public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.getDescriptor().getEnumTypes().get(0); } private static final ReadType[] VALUES = values(); public static ReadType valueOf( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } return VALUES[desc.getIndex()]; } private final int value; private 
ReadType(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:hbase.pb.Scan.ReadType) } private int bitField0_; public static final int COLUMN_FIELD_NUMBER = 1; private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column> column_; /** * <code>repeated .hbase.pb.Column column = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column> getColumnList() { return column_; } /** * <code>repeated .hbase.pb.Column column = 1;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ColumnOrBuilder> getColumnOrBuilderList() { return column_; } /** * <code>repeated .hbase.pb.Column column = 1;</code> */ public int getColumnCount() { return column_.size(); } /** * <code>repeated .hbase.pb.Column column = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column getColumn(int index) { return column_.get(index); } /** * <code>repeated .hbase.pb.Column column = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder( int index) { return column_.get(index); } public static final int ATTRIBUTE_FIELD_NUMBER = 2; private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair> attribute_; /** * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList() { return attribute_; } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getAttributeOrBuilderList() { return attribute_; } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code> */ public int getAttributeCount() { return attribute_.size(); } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) { return attribute_.get(index); } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( int index) { return attribute_.get(index); } public static final int START_ROW_FIELD_NUMBER = 3; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString startRow_; /** * <code>optional bytes start_row = 3;</code> */ public boolean hasStartRow() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional bytes start_row = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getStartRow() { return startRow_; } public static final int STOP_ROW_FIELD_NUMBER = 4; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString stopRow_; /** * <code>optional bytes stop_row = 4;</code> */ public boolean hasStopRow() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional bytes stop_row = 4;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getStopRow() { return stopRow_; } public static final int FILTER_FIELD_NUMBER = 5; private org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter filter_; /** * <code>optional .hbase.pb.Filter filter = 5;</code> */ public boolean hasFilter() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional .hbase.pb.Filter filter = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter getFilter() { return 
filter_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance() : filter_; } /** * <code>optional .hbase.pb.Filter filter = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() { return filter_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance() : filter_; } public static final int TIME_RANGE_FIELD_NUMBER = 6; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange timeRange_; /** * <code>optional .hbase.pb.TimeRange time_range = 6;</code> */ public boolean hasTimeRange() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional .hbase.pb.TimeRange time_range = 6;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { return timeRange_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance() : timeRange_; } /** * <code>optional .hbase.pb.TimeRange time_range = 6;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { return timeRange_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance() : timeRange_; } public static final int MAX_VERSIONS_FIELD_NUMBER = 7; private int maxVersions_; /** * <code>optional uint32 max_versions = 7 [default = 1];</code> */ public boolean hasMaxVersions() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>optional uint32 max_versions = 7 [default = 1];</code> */ public int getMaxVersions() { return maxVersions_; } public static final int CACHE_BLOCKS_FIELD_NUMBER = 8; private boolean cacheBlocks_; /** * <code>optional bool cache_blocks = 8 [default = true];</code> */ public boolean hasCacheBlocks() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <code>optional bool cache_blocks = 8 [default = true];</code> */ public boolean getCacheBlocks() { return cacheBlocks_; } public static final int BATCH_SIZE_FIELD_NUMBER = 9; private int batchSize_; /** * <code>optional uint32 batch_size = 9;</code> */ public boolean hasBatchSize() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** * <code>optional uint32 batch_size = 9;</code> */ public int getBatchSize() { return batchSize_; } public static final int MAX_RESULT_SIZE_FIELD_NUMBER = 10; private long maxResultSize_; /** * <code>optional uint64 max_result_size = 10;</code> */ public boolean hasMaxResultSize() { return ((bitField0_ & 0x00000080) == 0x00000080); } /** * <code>optional uint64 max_result_size = 10;</code> */ public long getMaxResultSize() { return maxResultSize_; } public static final int STORE_LIMIT_FIELD_NUMBER = 11; private int storeLimit_; /** * <code>optional uint32 store_limit = 11;</code> */ public boolean hasStoreLimit() { return ((bitField0_ & 0x00000100) == 0x00000100); } /** * <code>optional uint32 store_limit = 11;</code> */ public int getStoreLimit() { return storeLimit_; } public static final int STORE_OFFSET_FIELD_NUMBER = 12; private int storeOffset_; /** * <code>optional uint32 store_offset = 12;</code> */ public boolean 
hasStoreOffset() { return ((bitField0_ & 0x00000200) == 0x00000200); } /** * <code>optional uint32 store_offset = 12;</code> */ public int getStoreOffset() { return storeOffset_; } public static final int LOAD_COLUMN_FAMILIES_ON_DEMAND_FIELD_NUMBER = 13; private boolean loadColumnFamiliesOnDemand_; /** * <pre> * DO NOT add defaults to load_column_families_on_demand. * </pre> * * <code>optional bool load_column_families_on_demand = 13;</code> */ public boolean hasLoadColumnFamiliesOnDemand() { return ((bitField0_ & 0x00000400) == 0x00000400); } /** * <pre> * DO NOT add defaults to load_column_families_on_demand. * </pre> * * <code>optional bool load_column_families_on_demand = 13;</code> */ public boolean getLoadColumnFamiliesOnDemand() { return loadColumnFamiliesOnDemand_; } public static final int SMALL_FIELD_NUMBER = 14; private boolean small_; /** * <code>optional bool small = 14 [deprecated = true];</code> */ @java.lang.Deprecated public boolean hasSmall() { return ((bitField0_ & 0x00000800) == 0x00000800); } /** * <code>optional bool small = 14 [deprecated = true];</code> */ @java.lang.Deprecated public boolean getSmall() { return small_; } public static final int REVERSED_FIELD_NUMBER = 15; private boolean reversed_; /** * <code>optional bool reversed = 15 [default = false];</code> */ public boolean hasReversed() { return ((bitField0_ & 0x00001000) == 0x00001000); } /** * <code>optional bool reversed = 15 [default = false];</code> */ public boolean getReversed() { return reversed_; } public static final int CONSISTENCY_FIELD_NUMBER = 16; private int consistency_; /** * <code>optional .hbase.pb.Consistency consistency = 16 [default = STRONG];</code> */ public boolean hasConsistency() { return ((bitField0_ & 0x00002000) == 0x00002000); } /** * <code>optional .hbase.pb.Consistency consistency = 16 [default = STRONG];</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency getConsistency() { 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency result = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency.valueOf(consistency_); return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency.STRONG : result; } public static final int CACHING_FIELD_NUMBER = 17; private int caching_; /** * <code>optional uint32 caching = 17;</code> */ public boolean hasCaching() { return ((bitField0_ & 0x00004000) == 0x00004000); } /** * <code>optional uint32 caching = 17;</code> */ public int getCaching() { return caching_; } public static final int ALLOW_PARTIAL_RESULTS_FIELD_NUMBER = 18; private boolean allowPartialResults_; /** * <code>optional bool allow_partial_results = 18;</code> */ public boolean hasAllowPartialResults() { return ((bitField0_ & 0x00008000) == 0x00008000); } /** * <code>optional bool allow_partial_results = 18;</code> */ public boolean getAllowPartialResults() { return allowPartialResults_; } public static final int CF_TIME_RANGE_FIELD_NUMBER = 19; private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> cfTimeRange_; /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> getCfTimeRangeList() { return cfTimeRange_; } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder> getCfTimeRangeOrBuilderList() { return cfTimeRange_; } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code> */ public int getCfTimeRangeCount() { return cfTimeRange_.size(); } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange getCfTimeRange(int index) { return cfTimeRange_.get(index); } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder getCfTimeRangeOrBuilder( int index) { return cfTimeRange_.get(index); } public static final int MVCC_READ_POINT_FIELD_NUMBER = 20; private long mvccReadPoint_; /** * <code>optional uint64 mvcc_read_point = 20 [default = 0];</code> */ public boolean hasMvccReadPoint() { return ((bitField0_ & 0x00010000) == 0x00010000); } /** * <code>optional uint64 mvcc_read_point = 20 [default = 0];</code> */ public long getMvccReadPoint() { return mvccReadPoint_; } public static final int INCLUDE_START_ROW_FIELD_NUMBER = 21; private boolean includeStartRow_; /** * <code>optional bool include_start_row = 21 [default = true];</code> */ public boolean hasIncludeStartRow() { return ((bitField0_ & 0x00020000) == 0x00020000); } /** * <code>optional bool include_start_row = 21 [default = true];</code> */ public boolean getIncludeStartRow() { return includeStartRow_; } public static final int INCLUDE_STOP_ROW_FIELD_NUMBER = 22; private boolean includeStopRow_; /** * <code>optional bool include_stop_row = 22 [default = false];</code> */ public boolean hasIncludeStopRow() { return ((bitField0_ & 0x00040000) == 0x00040000); } /** * <code>optional bool include_stop_row = 22 [default = false];</code> */ public boolean getIncludeStopRow() { return includeStopRow_; } public static 
final int READTYPE_FIELD_NUMBER = 23; private int readType_; /** * <code>optional .hbase.pb.Scan.ReadType readType = 23 [default = DEFAULT];</code> */ public boolean hasReadType() { return ((bitField0_ & 0x00080000) == 0x00080000); } /** * <code>optional .hbase.pb.Scan.ReadType readType = 23 [default = DEFAULT];</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.ReadType getReadType() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.ReadType result = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.ReadType.valueOf(readType_); return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.ReadType.DEFAULT : result; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getColumnCount(); i++) { if (!getColumn(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } for (int i = 0; i < getAttributeCount(); i++) { if (!getAttribute(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } if (hasFilter()) { if (!getFilter().isInitialized()) { memoizedIsInitialized = 0; return false; } } for (int i = 0; i < getCfTimeRangeCount(); i++) { if (!getCfTimeRange(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < column_.size(); i++) { output.writeMessage(1, column_.get(i)); } for (int i = 0; i < attribute_.size(); i++) { output.writeMessage(2, attribute_.get(i)); } if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(3, startRow_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(4, stopRow_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { 
output.writeMessage(5, getFilter()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeMessage(6, getTimeRange()); } if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeUInt32(7, maxVersions_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { output.writeBool(8, cacheBlocks_); } if (((bitField0_ & 0x00000040) == 0x00000040)) { output.writeUInt32(9, batchSize_); } if (((bitField0_ & 0x00000080) == 0x00000080)) { output.writeUInt64(10, maxResultSize_); } if (((bitField0_ & 0x00000100) == 0x00000100)) { output.writeUInt32(11, storeLimit_); } if (((bitField0_ & 0x00000200) == 0x00000200)) { output.writeUInt32(12, storeOffset_); } if (((bitField0_ & 0x00000400) == 0x00000400)) { output.writeBool(13, loadColumnFamiliesOnDemand_); } if (((bitField0_ & 0x00000800) == 0x00000800)) { output.writeBool(14, small_); } if (((bitField0_ & 0x00001000) == 0x00001000)) { output.writeBool(15, reversed_); } if (((bitField0_ & 0x00002000) == 0x00002000)) { output.writeEnum(16, consistency_); } if (((bitField0_ & 0x00004000) == 0x00004000)) { output.writeUInt32(17, caching_); } if (((bitField0_ & 0x00008000) == 0x00008000)) { output.writeBool(18, allowPartialResults_); } for (int i = 0; i < cfTimeRange_.size(); i++) { output.writeMessage(19, cfTimeRange_.get(i)); } if (((bitField0_ & 0x00010000) == 0x00010000)) { output.writeUInt64(20, mvccReadPoint_); } if (((bitField0_ & 0x00020000) == 0x00020000)) { output.writeBool(21, includeStartRow_); } if (((bitField0_ & 0x00040000) == 0x00040000)) { output.writeBool(22, includeStopRow_); } if (((bitField0_ & 0x00080000) == 0x00080000)) { output.writeEnum(23, readType_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < column_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(1, column_.get(i)); } for (int i = 0; i < attribute_.size(); i++) { size += 
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(2, attribute_.get(i)); } if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(3, startRow_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(4, stopRow_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(5, getFilter()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(6, getTimeRange()); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt32Size(7, maxVersions_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(8, cacheBlocks_); } if (((bitField0_ & 0x00000040) == 0x00000040)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt32Size(9, batchSize_); } if (((bitField0_ & 0x00000080) == 0x00000080)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt64Size(10, maxResultSize_); } if (((bitField0_ & 0x00000100) == 0x00000100)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt32Size(11, storeLimit_); } if (((bitField0_ & 0x00000200) == 0x00000200)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt32Size(12, storeOffset_); } if (((bitField0_ & 0x00000400) == 0x00000400)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(13, loadColumnFamiliesOnDemand_); } if (((bitField0_ & 0x00000800) == 0x00000800)) { size += 
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(14, small_); } if (((bitField0_ & 0x00001000) == 0x00001000)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(15, reversed_); } if (((bitField0_ & 0x00002000) == 0x00002000)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeEnumSize(16, consistency_); } if (((bitField0_ & 0x00004000) == 0x00004000)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt32Size(17, caching_); } if (((bitField0_ & 0x00008000) == 0x00008000)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(18, allowPartialResults_); } for (int i = 0; i < cfTimeRange_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(19, cfTimeRange_.get(i)); } if (((bitField0_ & 0x00010000) == 0x00010000)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt64Size(20, mvccReadPoint_); } if (((bitField0_ & 0x00020000) == 0x00020000)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(21, includeStartRow_); } if (((bitField0_ & 0x00040000) == 0x00040000)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(22, includeStopRow_); } if (((bitField0_ & 0x00080000) == 0x00080000)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeEnumSize(23, readType_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan)) { return super.equals(obj); } 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan) obj; boolean result = true; result = result && getColumnList() .equals(other.getColumnList()); result = result && getAttributeList() .equals(other.getAttributeList()); result = result && (hasStartRow() == other.hasStartRow()); if (hasStartRow()) { result = result && getStartRow() .equals(other.getStartRow()); } result = result && (hasStopRow() == other.hasStopRow()); if (hasStopRow()) { result = result && getStopRow() .equals(other.getStopRow()); } result = result && (hasFilter() == other.hasFilter()); if (hasFilter()) { result = result && getFilter() .equals(other.getFilter()); } result = result && (hasTimeRange() == other.hasTimeRange()); if (hasTimeRange()) { result = result && getTimeRange() .equals(other.getTimeRange()); } result = result && (hasMaxVersions() == other.hasMaxVersions()); if (hasMaxVersions()) { result = result && (getMaxVersions() == other.getMaxVersions()); } result = result && (hasCacheBlocks() == other.hasCacheBlocks()); if (hasCacheBlocks()) { result = result && (getCacheBlocks() == other.getCacheBlocks()); } result = result && (hasBatchSize() == other.hasBatchSize()); if (hasBatchSize()) { result = result && (getBatchSize() == other.getBatchSize()); } result = result && (hasMaxResultSize() == other.hasMaxResultSize()); if (hasMaxResultSize()) { result = result && (getMaxResultSize() == other.getMaxResultSize()); } result = result && (hasStoreLimit() == other.hasStoreLimit()); if (hasStoreLimit()) { result = result && (getStoreLimit() == other.getStoreLimit()); } result = result && (hasStoreOffset() == other.hasStoreOffset()); if (hasStoreOffset()) { result = result && (getStoreOffset() == other.getStoreOffset()); } result = result && (hasLoadColumnFamiliesOnDemand() == other.hasLoadColumnFamiliesOnDemand()); if (hasLoadColumnFamiliesOnDemand()) { result = result && 
(getLoadColumnFamiliesOnDemand() == other.getLoadColumnFamiliesOnDemand()); } result = result && (hasSmall() == other.hasSmall()); if (hasSmall()) { result = result && (getSmall() == other.getSmall()); } result = result && (hasReversed() == other.hasReversed()); if (hasReversed()) { result = result && (getReversed() == other.getReversed()); } result = result && (hasConsistency() == other.hasConsistency()); if (hasConsistency()) { result = result && consistency_ == other.consistency_; } result = result && (hasCaching() == other.hasCaching()); if (hasCaching()) { result = result && (getCaching() == other.getCaching()); } result = result && (hasAllowPartialResults() == other.hasAllowPartialResults()); if (hasAllowPartialResults()) { result = result && (getAllowPartialResults() == other.getAllowPartialResults()); } result = result && getCfTimeRangeList() .equals(other.getCfTimeRangeList()); result = result && (hasMvccReadPoint() == other.hasMvccReadPoint()); if (hasMvccReadPoint()) { result = result && (getMvccReadPoint() == other.getMvccReadPoint()); } result = result && (hasIncludeStartRow() == other.hasIncludeStartRow()); if (hasIncludeStartRow()) { result = result && (getIncludeStartRow() == other.getIncludeStartRow()); } result = result && (hasIncludeStopRow() == other.hasIncludeStopRow()); if (hasIncludeStopRow()) { result = result && (getIncludeStopRow() == other.getIncludeStopRow()); } result = result && (hasReadType() == other.hasReadType()); if (hasReadType()) { result = result && readType_ == other.readType_; } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getColumnCount() > 0) { hash = (37 * hash) + COLUMN_FIELD_NUMBER; hash = (53 * hash) + getColumnList().hashCode(); } if (getAttributeCount() > 0) { hash = (37 * hash) + ATTRIBUTE_FIELD_NUMBER; hash = (53 * 
hash) + getAttributeList().hashCode(); } if (hasStartRow()) { hash = (37 * hash) + START_ROW_FIELD_NUMBER; hash = (53 * hash) + getStartRow().hashCode(); } if (hasStopRow()) { hash = (37 * hash) + STOP_ROW_FIELD_NUMBER; hash = (53 * hash) + getStopRow().hashCode(); } if (hasFilter()) { hash = (37 * hash) + FILTER_FIELD_NUMBER; hash = (53 * hash) + getFilter().hashCode(); } if (hasTimeRange()) { hash = (37 * hash) + TIME_RANGE_FIELD_NUMBER; hash = (53 * hash) + getTimeRange().hashCode(); } if (hasMaxVersions()) { hash = (37 * hash) + MAX_VERSIONS_FIELD_NUMBER; hash = (53 * hash) + getMaxVersions(); } if (hasCacheBlocks()) { hash = (37 * hash) + CACHE_BLOCKS_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getCacheBlocks()); } if (hasBatchSize()) { hash = (37 * hash) + BATCH_SIZE_FIELD_NUMBER; hash = (53 * hash) + getBatchSize(); } if (hasMaxResultSize()) { hash = (37 * hash) + MAX_RESULT_SIZE_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( getMaxResultSize()); } if (hasStoreLimit()) { hash = (37 * hash) + STORE_LIMIT_FIELD_NUMBER; hash = (53 * hash) + getStoreLimit(); } if (hasStoreOffset()) { hash = (37 * hash) + STORE_OFFSET_FIELD_NUMBER; hash = (53 * hash) + getStoreOffset(); } if (hasLoadColumnFamiliesOnDemand()) { hash = (37 * hash) + LOAD_COLUMN_FAMILIES_ON_DEMAND_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getLoadColumnFamiliesOnDemand()); } if (hasSmall()) { hash = (37 * hash) + SMALL_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getSmall()); } if (hasReversed()) { hash = (37 * hash) + REVERSED_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getReversed()); } if (hasConsistency()) { hash = (37 * hash) + CONSISTENCY_FIELD_NUMBER; hash = (53 * hash) + 
consistency_; } if (hasCaching()) { hash = (37 * hash) + CACHING_FIELD_NUMBER; hash = (53 * hash) + getCaching(); } if (hasAllowPartialResults()) { hash = (37 * hash) + ALLOW_PARTIAL_RESULTS_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getAllowPartialResults()); } if (getCfTimeRangeCount() > 0) { hash = (37 * hash) + CF_TIME_RANGE_FIELD_NUMBER; hash = (53 * hash) + getCfTimeRangeList().hashCode(); } if (hasMvccReadPoint()) { hash = (37 * hash) + MVCC_READ_POINT_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( getMvccReadPoint()); } if (hasIncludeStartRow()) { hash = (37 * hash) + INCLUDE_START_ROW_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getIncludeStartRow()); } if (hasIncludeStopRow()) { hash = (37 * hash) + INCLUDE_STOP_ROW_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getIncludeStopRow()); } if (hasReadType()) { hash = (37 * hash) + READTYPE_FIELD_NUMBER; hash = (53 * hash) + readType_; } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan parseFrom( 
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> ** * Instead of get from a table, you can scan it with optional filters. * You can specify the row key range, time range, the columns/families * to scan and so on. * This scan is used the first time in a scan request. The response of * the initial scan will return a scanner id, which should be used to * fetch result batches later on before it is closed. 
* </pre> * * Protobuf type {@code hbase.pb.Scan} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.Scan) org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Scan_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Scan_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getColumnFieldBuilder(); getAttributeFieldBuilder(); getFilterFieldBuilder(); getTimeRangeFieldBuilder(); getCfTimeRangeFieldBuilder(); } } public Builder clear() { super.clear(); if (columnBuilder_ == null) { column_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { columnBuilder_.clear(); } if (attributeBuilder_ == null) { attribute_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); } else { attributeBuilder_.clear(); } startRow_ = 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000004); stopRow_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000008); if (filterBuilder_ == null) { filter_ = null; } else { filterBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000010); if (timeRangeBuilder_ == null) { timeRange_ = null; } else { timeRangeBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000020); maxVersions_ = 1; bitField0_ = (bitField0_ & ~0x00000040); cacheBlocks_ = true; bitField0_ = (bitField0_ & ~0x00000080); batchSize_ = 0; bitField0_ = (bitField0_ & ~0x00000100); maxResultSize_ = 0L; bitField0_ = (bitField0_ & ~0x00000200); storeLimit_ = 0; bitField0_ = (bitField0_ & ~0x00000400); storeOffset_ = 0; bitField0_ = (bitField0_ & ~0x00000800); loadColumnFamiliesOnDemand_ = false; bitField0_ = (bitField0_ & ~0x00001000); small_ = false; bitField0_ = (bitField0_ & ~0x00002000); reversed_ = false; bitField0_ = (bitField0_ & ~0x00004000); consistency_ = 0; bitField0_ = (bitField0_ & ~0x00008000); caching_ = 0; bitField0_ = (bitField0_ & ~0x00010000); allowPartialResults_ = false; bitField0_ = (bitField0_ & ~0x00020000); if (cfTimeRangeBuilder_ == null) { cfTimeRange_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00040000); } else { cfTimeRangeBuilder_.clear(); } mvccReadPoint_ = 0L; bitField0_ = (bitField0_ & ~0x00080000); includeStartRow_ = true; bitField0_ = (bitField0_ & ~0x00100000); includeStopRow_ = false; bitField0_ = (bitField0_ & ~0x00200000); readType_ = 0; bitField0_ = (bitField0_ & ~0x00400000); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Scan_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan getDefaultInstanceForType() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (columnBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001)) { column_ = java.util.Collections.unmodifiableList(column_); bitField0_ = (bitField0_ & ~0x00000001); } result.column_ = column_; } else { result.column_ = columnBuilder_.build(); } if (attributeBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002)) { attribute_ = java.util.Collections.unmodifiableList(attribute_); bitField0_ = (bitField0_ & ~0x00000002); } result.attribute_ = attribute_; } else { result.attribute_ = attributeBuilder_.build(); } if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000001; } result.startRow_ = startRow_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000002; } result.stopRow_ = stopRow_; if (((from_bitField0_ & 0x00000010) == 0x00000010)) { to_bitField0_ |= 0x00000004; } if (filterBuilder_ == null) { result.filter_ = filter_; } else { result.filter_ = filterBuilder_.build(); } if (((from_bitField0_ & 0x00000020) == 0x00000020)) { to_bitField0_ |= 0x00000008; } if (timeRangeBuilder_ == null) { result.timeRange_ = timeRange_; } else { result.timeRange_ = timeRangeBuilder_.build(); } if (((from_bitField0_ & 0x00000040) == 0x00000040)) { to_bitField0_ |= 0x00000010; } result.maxVersions_ = maxVersions_; if (((from_bitField0_ & 0x00000080) 
== 0x00000080)) { to_bitField0_ |= 0x00000020; } result.cacheBlocks_ = cacheBlocks_; if (((from_bitField0_ & 0x00000100) == 0x00000100)) { to_bitField0_ |= 0x00000040; } result.batchSize_ = batchSize_; if (((from_bitField0_ & 0x00000200) == 0x00000200)) { to_bitField0_ |= 0x00000080; } result.maxResultSize_ = maxResultSize_; if (((from_bitField0_ & 0x00000400) == 0x00000400)) { to_bitField0_ |= 0x00000100; } result.storeLimit_ = storeLimit_; if (((from_bitField0_ & 0x00000800) == 0x00000800)) { to_bitField0_ |= 0x00000200; } result.storeOffset_ = storeOffset_; if (((from_bitField0_ & 0x00001000) == 0x00001000)) { to_bitField0_ |= 0x00000400; } result.loadColumnFamiliesOnDemand_ = loadColumnFamiliesOnDemand_; if (((from_bitField0_ & 0x00002000) == 0x00002000)) { to_bitField0_ |= 0x00000800; } result.small_ = small_; if (((from_bitField0_ & 0x00004000) == 0x00004000)) { to_bitField0_ |= 0x00001000; } result.reversed_ = reversed_; if (((from_bitField0_ & 0x00008000) == 0x00008000)) { to_bitField0_ |= 0x00002000; } result.consistency_ = consistency_; if (((from_bitField0_ & 0x00010000) == 0x00010000)) { to_bitField0_ |= 0x00004000; } result.caching_ = caching_; if (((from_bitField0_ & 0x00020000) == 0x00020000)) { to_bitField0_ |= 0x00008000; } result.allowPartialResults_ = allowPartialResults_; if (cfTimeRangeBuilder_ == null) { if (((bitField0_ & 0x00040000) == 0x00040000)) { cfTimeRange_ = java.util.Collections.unmodifiableList(cfTimeRange_); bitField0_ = (bitField0_ & ~0x00040000); } result.cfTimeRange_ = cfTimeRange_; } else { result.cfTimeRange_ = cfTimeRangeBuilder_.build(); } if (((from_bitField0_ & 0x00080000) == 0x00080000)) { to_bitField0_ |= 0x00010000; } result.mvccReadPoint_ = mvccReadPoint_; if (((from_bitField0_ & 0x00100000) == 0x00100000)) { to_bitField0_ |= 0x00020000; } result.includeStartRow_ = includeStartRow_; if (((from_bitField0_ & 0x00200000) == 0x00200000)) { to_bitField0_ |= 0x00040000; } result.includeStopRow_ = includeStopRow_; if 
(((from_bitField0_ & 0x00400000) == 0x00400000)) { to_bitField0_ |= 0x00080000; } result.readType_ = readType_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.getDefaultInstance()) return this; if (columnBuilder_ == null) { if (!other.column_.isEmpty()) { if (column_.isEmpty()) { column_ = other.column_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureColumnIsMutable(); column_.addAll(other.column_); } onChanged(); } } else { if (!other.column_.isEmpty()) { if (columnBuilder_.isEmpty()) { 
columnBuilder_.dispose(); columnBuilder_ = null; column_ = other.column_; bitField0_ = (bitField0_ & ~0x00000001); columnBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getColumnFieldBuilder() : null; } else { columnBuilder_.addAllMessages(other.column_); } } } if (attributeBuilder_ == null) { if (!other.attribute_.isEmpty()) { if (attribute_.isEmpty()) { attribute_ = other.attribute_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureAttributeIsMutable(); attribute_.addAll(other.attribute_); } onChanged(); } } else { if (!other.attribute_.isEmpty()) { if (attributeBuilder_.isEmpty()) { attributeBuilder_.dispose(); attributeBuilder_ = null; attribute_ = other.attribute_; bitField0_ = (bitField0_ & ~0x00000002); attributeBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getAttributeFieldBuilder() : null; } else { attributeBuilder_.addAllMessages(other.attribute_); } } } if (other.hasStartRow()) { setStartRow(other.getStartRow()); } if (other.hasStopRow()) { setStopRow(other.getStopRow()); } if (other.hasFilter()) { mergeFilter(other.getFilter()); } if (other.hasTimeRange()) { mergeTimeRange(other.getTimeRange()); } if (other.hasMaxVersions()) { setMaxVersions(other.getMaxVersions()); } if (other.hasCacheBlocks()) { setCacheBlocks(other.getCacheBlocks()); } if (other.hasBatchSize()) { setBatchSize(other.getBatchSize()); } if (other.hasMaxResultSize()) { setMaxResultSize(other.getMaxResultSize()); } if (other.hasStoreLimit()) { setStoreLimit(other.getStoreLimit()); } if (other.hasStoreOffset()) { setStoreOffset(other.getStoreOffset()); } if (other.hasLoadColumnFamiliesOnDemand()) { setLoadColumnFamiliesOnDemand(other.getLoadColumnFamiliesOnDemand()); } if (other.hasSmall()) { setSmall(other.getSmall()); } if (other.hasReversed()) { setReversed(other.getReversed()); } if (other.hasConsistency()) { setConsistency(other.getConsistency()); } if 
(other.hasCaching()) { setCaching(other.getCaching()); } if (other.hasAllowPartialResults()) { setAllowPartialResults(other.getAllowPartialResults()); } if (cfTimeRangeBuilder_ == null) { if (!other.cfTimeRange_.isEmpty()) { if (cfTimeRange_.isEmpty()) { cfTimeRange_ = other.cfTimeRange_; bitField0_ = (bitField0_ & ~0x00040000); } else { ensureCfTimeRangeIsMutable(); cfTimeRange_.addAll(other.cfTimeRange_); } onChanged(); } } else { if (!other.cfTimeRange_.isEmpty()) { if (cfTimeRangeBuilder_.isEmpty()) { cfTimeRangeBuilder_.dispose(); cfTimeRangeBuilder_ = null; cfTimeRange_ = other.cfTimeRange_; bitField0_ = (bitField0_ & ~0x00040000); cfTimeRangeBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getCfTimeRangeFieldBuilder() : null; } else { cfTimeRangeBuilder_.addAllMessages(other.cfTimeRange_); } } } if (other.hasMvccReadPoint()) { setMvccReadPoint(other.getMvccReadPoint()); } if (other.hasIncludeStartRow()) { setIncludeStartRow(other.getIncludeStartRow()); } if (other.hasIncludeStopRow()) { setIncludeStopRow(other.getIncludeStopRow()); } if (other.hasReadType()) { setReadType(other.getReadType()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getColumnCount(); i++) { if (!getColumn(i).isInitialized()) { return false; } } for (int i = 0; i < getAttributeCount(); i++) { if (!getAttribute(i).isInitialized()) { return false; } } if (hasFilter()) { if (!getFilter().isInitialized()) { return false; } } for (int i = 0; i < getCfTimeRangeCount(); i++) { if (!getCfTimeRange(i).isInitialized()) { return false; } } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan parsedMessage = 
null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column> column_ = java.util.Collections.emptyList(); private void ensureColumnIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { column_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column>(column_); bitField0_ |= 0x00000001; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ColumnOrBuilder> columnBuilder_; /** * <code>repeated .hbase.pb.Column column = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column> getColumnList() { if (columnBuilder_ == null) { return java.util.Collections.unmodifiableList(column_); } else { return columnBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.Column column = 1;</code> */ public int getColumnCount() { if (columnBuilder_ == null) { return column_.size(); } else { return columnBuilder_.getCount(); } } /** * <code>repeated .hbase.pb.Column column = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column getColumn(int index) { if (columnBuilder_ == null) { return column_.get(index); } else { return columnBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.Column column = 1;</code> */ public Builder setColumn( int index, 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column value) { if (columnBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureColumnIsMutable(); column_.set(index, value); onChanged(); } else { columnBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.Column column = 1;</code> */ public Builder setColumn( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.Builder builderForValue) { if (columnBuilder_ == null) { ensureColumnIsMutable(); column_.set(index, builderForValue.build()); onChanged(); } else { columnBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.Column column = 1;</code> */ public Builder addColumn(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column value) { if (columnBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureColumnIsMutable(); column_.add(value); onChanged(); } else { columnBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.Column column = 1;</code> */ public Builder addColumn( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column value) { if (columnBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureColumnIsMutable(); column_.add(index, value); onChanged(); } else { columnBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.Column column = 1;</code> */ public Builder addColumn( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.Builder builderForValue) { if (columnBuilder_ == null) { ensureColumnIsMutable(); column_.add(builderForValue.build()); onChanged(); } else { columnBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.Column column = 1;</code> */ public Builder addColumn( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.Builder 
builderForValue) { if (columnBuilder_ == null) { ensureColumnIsMutable(); column_.add(index, builderForValue.build()); onChanged(); } else { columnBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.Column column = 1;</code> */ public Builder addAllColumn( java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column> values) { if (columnBuilder_ == null) { ensureColumnIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, column_); onChanged(); } else { columnBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.Column column = 1;</code> */ public Builder clearColumn() { if (columnBuilder_ == null) { column_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { columnBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.Column column = 1;</code> */ public Builder removeColumn(int index) { if (columnBuilder_ == null) { ensureColumnIsMutable(); column_.remove(index); onChanged(); } else { columnBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.Column column = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.Builder getColumnBuilder( int index) { return getColumnFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.Column column = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder( int index) { if (columnBuilder_ == null) { return column_.get(index); } else { return columnBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.Column column = 1;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ColumnOrBuilder> getColumnOrBuilderList() { if (columnBuilder_ != null) { return columnBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(column_); } } /** * <code>repeated .hbase.pb.Column column = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder() { return getColumnFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.getDefaultInstance()); } /** * <code>repeated .hbase.pb.Column column = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder( int index) { return getColumnFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.getDefaultInstance()); } /** * <code>repeated .hbase.pb.Column column = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.Builder> getColumnBuilderList() { return getColumnFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ColumnOrBuilder> getColumnFieldBuilder() { if (columnBuilder_ == null) { columnBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ColumnOrBuilder>( column_, ((bitField0_ & 0x00000001) == 0x00000001), getParentForChildren(), isClean()); column_ = null; } return columnBuilder_; } private 
java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair> attribute_ = java.util.Collections.emptyList(); private void ensureAttributeIsMutable() { if (!((bitField0_ & 0x00000002) == 0x00000002)) { attribute_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair>(attribute_); bitField0_ |= 0x00000002; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> attributeBuilder_; /** * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList() { if (attributeBuilder_ == null) { return java.util.Collections.unmodifiableList(attribute_); } else { return attributeBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code> */ public int getAttributeCount() { if (attributeBuilder_ == null) { return attribute_.size(); } else { return attributeBuilder_.getCount(); } } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) { if (attributeBuilder_ == null) { return attribute_.get(index); } else { return attributeBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code> */ public Builder setAttribute( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair value) { if (attributeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAttributeIsMutable(); attribute_.set(index, value); onChanged(); } else { attributeBuilder_.setMessage(index, value); } 
return this; } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code> */ public Builder setAttribute( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { if (attributeBuilder_ == null) { ensureAttributeIsMutable(); attribute_.set(index, builderForValue.build()); onChanged(); } else { attributeBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code> */ public Builder addAttribute(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair value) { if (attributeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAttributeIsMutable(); attribute_.add(value); onChanged(); } else { attributeBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code> */ public Builder addAttribute( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair value) { if (attributeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAttributeIsMutable(); attribute_.add(index, value); onChanged(); } else { attributeBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code> */ public Builder addAttribute( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { if (attributeBuilder_ == null) { ensureAttributeIsMutable(); attribute_.add(builderForValue.build()); onChanged(); } else { attributeBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code> */ public Builder addAttribute( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { if (attributeBuilder_ == null) { ensureAttributeIsMutable(); attribute_.add(index, builderForValue.build()); onChanged(); } 
else { attributeBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code> */ public Builder addAllAttribute( java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair> values) { if (attributeBuilder_ == null) { ensureAttributeIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, attribute_); onChanged(); } else { attributeBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code> */ public Builder clearAttribute() { if (attributeBuilder_ == null) { attribute_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { attributeBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code> */ public Builder removeAttribute(int index) { if (attributeBuilder_ == null) { ensureAttributeIsMutable(); attribute_.remove(index); onChanged(); } else { attributeBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder getAttributeBuilder( int index) { return getAttributeFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( int index) { if (attributeBuilder_ == null) { return attribute_.get(index); } else { return attributeBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getAttributeOrBuilderList() { if (attributeBuilder_ != null) { return attributeBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(attribute_); } } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder() { return getAttributeFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder( int index) { return getAttributeFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); } /** * <code>repeated .hbase.pb.NameBytesPair attribute = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder> getAttributeBuilderList() { return getAttributeFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getAttributeFieldBuilder() { if (attributeBuilder_ == null) { attributeBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( attribute_, ((bitField0_ & 
0x00000002) == 0x00000002), getParentForChildren(), isClean()); attribute_ = null; } return attributeBuilder_; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString startRow_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>optional bytes start_row = 3;</code> */ public boolean hasStartRow() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional bytes start_row = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getStartRow() { return startRow_; } /** * <code>optional bytes start_row = 3;</code> */ public Builder setStartRow(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; startRow_ = value; onChanged(); return this; } /** * <code>optional bytes start_row = 3;</code> */ public Builder clearStartRow() { bitField0_ = (bitField0_ & ~0x00000004); startRow_ = getDefaultInstance().getStartRow(); onChanged(); return this; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString stopRow_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>optional bytes stop_row = 4;</code> */ public boolean hasStopRow() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional bytes stop_row = 4;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getStopRow() { return stopRow_; } /** * <code>optional bytes stop_row = 4;</code> */ public Builder setStopRow(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000008; stopRow_ = value; onChanged(); return this; } /** * <code>optional bytes stop_row = 4;</code> */ public Builder clearStopRow() { bitField0_ = (bitField0_ & ~0x00000008); stopRow_ = getDefaultInstance().getStopRow(); onChanged(); return this; } private 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter filter_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterOrBuilder> filterBuilder_; /** * <code>optional .hbase.pb.Filter filter = 5;</code> */ public boolean hasFilter() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>optional .hbase.pb.Filter filter = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter getFilter() { if (filterBuilder_ == null) { return filter_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance() : filter_; } else { return filterBuilder_.getMessage(); } } /** * <code>optional .hbase.pb.Filter filter = 5;</code> */ public Builder setFilter(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter value) { if (filterBuilder_ == null) { if (value == null) { throw new NullPointerException(); } filter_ = value; onChanged(); } else { filterBuilder_.setMessage(value); } bitField0_ |= 0x00000010; return this; } /** * <code>optional .hbase.pb.Filter filter = 5;</code> */ public Builder setFilter( org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.Builder builderForValue) { if (filterBuilder_ == null) { filter_ = builderForValue.build(); onChanged(); } else { filterBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000010; return this; } /** * <code>optional .hbase.pb.Filter filter = 5;</code> */ public Builder mergeFilter(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter value) { if (filterBuilder_ == null) { if (((bitField0_ & 0x00000010) == 0x00000010) && filter_ != null && filter_ != 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance()) { filter_ = org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.newBuilder(filter_).mergeFrom(value).buildPartial(); } else { filter_ = value; } onChanged(); } else { filterBuilder_.mergeFrom(value); } bitField0_ |= 0x00000010; return this; } /** * <code>optional .hbase.pb.Filter filter = 5;</code> */ public Builder clearFilter() { if (filterBuilder_ == null) { filter_ = null; onChanged(); } else { filterBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000010); return this; } /** * <code>optional .hbase.pb.Filter filter = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.Builder getFilterBuilder() { bitField0_ |= 0x00000010; onChanged(); return getFilterFieldBuilder().getBuilder(); } /** * <code>optional .hbase.pb.Filter filter = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() { if (filterBuilder_ != null) { return filterBuilder_.getMessageOrBuilder(); } else { return filter_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.getDefaultInstance() : filter_; } } /** * <code>optional .hbase.pb.Filter filter = 5;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterOrBuilder> getFilterFieldBuilder() { if (filterBuilder_ == null) { filterBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterOrBuilder>( getFilter(), getParentForChildren(), isClean()); filter_ = null; } return filterBuilder_; } private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange timeRange_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_; /** * <code>optional .hbase.pb.TimeRange time_range = 6;</code> */ public boolean hasTimeRange() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <code>optional .hbase.pb.TimeRange time_range = 6;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { if (timeRangeBuilder_ == null) { return timeRange_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance() : timeRange_; } else { return timeRangeBuilder_.getMessage(); } } /** * <code>optional .hbase.pb.TimeRange time_range = 6;</code> */ public Builder setTimeRange(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange value) { if (timeRangeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } timeRange_ = value; onChanged(); } else { timeRangeBuilder_.setMessage(value); } bitField0_ |= 0x00000020; return this; } /** * <code>optional .hbase.pb.TimeRange time_range = 6;</code> */ public Builder setTimeRange( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) { if (timeRangeBuilder_ == null) { timeRange_ = builderForValue.build(); onChanged(); } else { timeRangeBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000020; return this; } /** * <code>optional .hbase.pb.TimeRange time_range = 6;</code> */ public Builder mergeTimeRange(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange value) { if (timeRangeBuilder_ == null) { if (((bitField0_ & 0x00000020) == 0x00000020) && timeRange_ != null && timeRange_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) { timeRange_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial(); } else { timeRange_ = value; } onChanged(); } else { timeRangeBuilder_.mergeFrom(value); } bitField0_ |= 0x00000020; return this; } /** * <code>optional .hbase.pb.TimeRange time_range = 6;</code> */ public Builder clearTimeRange() { if (timeRangeBuilder_ == null) { timeRange_ = null; onChanged(); } else { timeRangeBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000020); return this; } /** * <code>optional .hbase.pb.TimeRange time_range = 6;</code> */ public 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() { bitField0_ |= 0x00000020; onChanged(); return getTimeRangeFieldBuilder().getBuilder(); } /** * <code>optional .hbase.pb.TimeRange time_range = 6;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { if (timeRangeBuilder_ != null) { return timeRangeBuilder_.getMessageOrBuilder(); } else { return timeRange_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance() : timeRange_; } } /** * <code>optional .hbase.pb.TimeRange time_range = 6;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> getTimeRangeFieldBuilder() { if (timeRangeBuilder_ == null) { timeRangeBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>( getTimeRange(), getParentForChildren(), isClean()); timeRange_ = null; } return timeRangeBuilder_; } private int maxVersions_ = 1; /** * <code>optional uint32 max_versions = 7 [default = 1];</code> */ public boolean hasMaxVersions() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** * <code>optional uint32 max_versions = 7 [default = 1];</code> */ public int getMaxVersions() { return maxVersions_; } /** * <code>optional uint32 max_versions = 7 [default = 1];</code> */ public Builder setMaxVersions(int value) { bitField0_ |= 0x00000040; maxVersions_ = value; onChanged(); return this; } /** * 
<code>optional uint32 max_versions = 7 [default = 1];</code> */ public Builder clearMaxVersions() { bitField0_ = (bitField0_ & ~0x00000040); maxVersions_ = 1; onChanged(); return this; } private boolean cacheBlocks_ = true; /** * <code>optional bool cache_blocks = 8 [default = true];</code> */ public boolean hasCacheBlocks() { return ((bitField0_ & 0x00000080) == 0x00000080); } /** * <code>optional bool cache_blocks = 8 [default = true];</code> */ public boolean getCacheBlocks() { return cacheBlocks_; } /** * <code>optional bool cache_blocks = 8 [default = true];</code> */ public Builder setCacheBlocks(boolean value) { bitField0_ |= 0x00000080; cacheBlocks_ = value; onChanged(); return this; } /** * <code>optional bool cache_blocks = 8 [default = true];</code> */ public Builder clearCacheBlocks() { bitField0_ = (bitField0_ & ~0x00000080); cacheBlocks_ = true; onChanged(); return this; } private int batchSize_ ; /** * <code>optional uint32 batch_size = 9;</code> */ public boolean hasBatchSize() { return ((bitField0_ & 0x00000100) == 0x00000100); } /** * <code>optional uint32 batch_size = 9;</code> */ public int getBatchSize() { return batchSize_; } /** * <code>optional uint32 batch_size = 9;</code> */ public Builder setBatchSize(int value) { bitField0_ |= 0x00000100; batchSize_ = value; onChanged(); return this; } /** * <code>optional uint32 batch_size = 9;</code> */ public Builder clearBatchSize() { bitField0_ = (bitField0_ & ~0x00000100); batchSize_ = 0; onChanged(); return this; } private long maxResultSize_ ; /** * <code>optional uint64 max_result_size = 10;</code> */ public boolean hasMaxResultSize() { return ((bitField0_ & 0x00000200) == 0x00000200); } /** * <code>optional uint64 max_result_size = 10;</code> */ public long getMaxResultSize() { return maxResultSize_; } /** * <code>optional uint64 max_result_size = 10;</code> */ public Builder setMaxResultSize(long value) { bitField0_ |= 0x00000200; maxResultSize_ = value; onChanged(); return this; } /** * 
<code>optional uint64 max_result_size = 10;</code> */ public Builder clearMaxResultSize() { bitField0_ = (bitField0_ & ~0x00000200); maxResultSize_ = 0L; onChanged(); return this; } private int storeLimit_ ; /** * <code>optional uint32 store_limit = 11;</code> */ public boolean hasStoreLimit() { return ((bitField0_ & 0x00000400) == 0x00000400); } /** * <code>optional uint32 store_limit = 11;</code> */ public int getStoreLimit() { return storeLimit_; } /** * <code>optional uint32 store_limit = 11;</code> */ public Builder setStoreLimit(int value) { bitField0_ |= 0x00000400; storeLimit_ = value; onChanged(); return this; } /** * <code>optional uint32 store_limit = 11;</code> */ public Builder clearStoreLimit() { bitField0_ = (bitField0_ & ~0x00000400); storeLimit_ = 0; onChanged(); return this; } private int storeOffset_ ; /** * <code>optional uint32 store_offset = 12;</code> */ public boolean hasStoreOffset() { return ((bitField0_ & 0x00000800) == 0x00000800); } /** * <code>optional uint32 store_offset = 12;</code> */ public int getStoreOffset() { return storeOffset_; } /** * <code>optional uint32 store_offset = 12;</code> */ public Builder setStoreOffset(int value) { bitField0_ |= 0x00000800; storeOffset_ = value; onChanged(); return this; } /** * <code>optional uint32 store_offset = 12;</code> */ public Builder clearStoreOffset() { bitField0_ = (bitField0_ & ~0x00000800); storeOffset_ = 0; onChanged(); return this; } private boolean loadColumnFamiliesOnDemand_ ; /** * <pre> * DO NOT add defaults to load_column_families_on_demand. * </pre> * * <code>optional bool load_column_families_on_demand = 13;</code> */ public boolean hasLoadColumnFamiliesOnDemand() { return ((bitField0_ & 0x00001000) == 0x00001000); } /** * <pre> * DO NOT add defaults to load_column_families_on_demand. 
* </pre> * * <code>optional bool load_column_families_on_demand = 13;</code> */ public boolean getLoadColumnFamiliesOnDemand() { return loadColumnFamiliesOnDemand_; } /** * <pre> * DO NOT add defaults to load_column_families_on_demand. * </pre> * * <code>optional bool load_column_families_on_demand = 13;</code> */ public Builder setLoadColumnFamiliesOnDemand(boolean value) { bitField0_ |= 0x00001000; loadColumnFamiliesOnDemand_ = value; onChanged(); return this; } /** * <pre> * DO NOT add defaults to load_column_families_on_demand. * </pre> * * <code>optional bool load_column_families_on_demand = 13;</code> */ public Builder clearLoadColumnFamiliesOnDemand() { bitField0_ = (bitField0_ & ~0x00001000); loadColumnFamiliesOnDemand_ = false; onChanged(); return this; } private boolean small_ ; /** * <code>optional bool small = 14 [deprecated = true];</code> */ @java.lang.Deprecated public boolean hasSmall() { return ((bitField0_ & 0x00002000) == 0x00002000); } /** * <code>optional bool small = 14 [deprecated = true];</code> */ @java.lang.Deprecated public boolean getSmall() { return small_; } /** * <code>optional bool small = 14 [deprecated = true];</code> */ @java.lang.Deprecated public Builder setSmall(boolean value) { bitField0_ |= 0x00002000; small_ = value; onChanged(); return this; } /** * <code>optional bool small = 14 [deprecated = true];</code> */ @java.lang.Deprecated public Builder clearSmall() { bitField0_ = (bitField0_ & ~0x00002000); small_ = false; onChanged(); return this; } private boolean reversed_ ; /** * <code>optional bool reversed = 15 [default = false];</code> */ public boolean hasReversed() { return ((bitField0_ & 0x00004000) == 0x00004000); } /** * <code>optional bool reversed = 15 [default = false];</code> */ public boolean getReversed() { return reversed_; } /** * <code>optional bool reversed = 15 [default = false];</code> */ public Builder setReversed(boolean value) { bitField0_ |= 0x00004000; reversed_ = value; onChanged(); return this; } 
/** * <code>optional bool reversed = 15 [default = false];</code> */ public Builder clearReversed() { bitField0_ = (bitField0_ & ~0x00004000); reversed_ = false; onChanged(); return this; } private int consistency_ = 0; /** * <code>optional .hbase.pb.Consistency consistency = 16 [default = STRONG];</code> */ public boolean hasConsistency() { return ((bitField0_ & 0x00008000) == 0x00008000); } /** * <code>optional .hbase.pb.Consistency consistency = 16 [default = STRONG];</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency getConsistency() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency result = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency.valueOf(consistency_); return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency.STRONG : result; } /** * <code>optional .hbase.pb.Consistency consistency = 16 [default = STRONG];</code> */ public Builder setConsistency(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Consistency value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00008000; consistency_ = value.getNumber(); onChanged(); return this; } /** * <code>optional .hbase.pb.Consistency consistency = 16 [default = STRONG];</code> */ public Builder clearConsistency() { bitField0_ = (bitField0_ & ~0x00008000); consistency_ = 0; onChanged(); return this; } private int caching_ ; /** * <code>optional uint32 caching = 17;</code> */ public boolean hasCaching() { return ((bitField0_ & 0x00010000) == 0x00010000); } /** * <code>optional uint32 caching = 17;</code> */ public int getCaching() { return caching_; } /** * <code>optional uint32 caching = 17;</code> */ public Builder setCaching(int value) { bitField0_ |= 0x00010000; caching_ = value; onChanged(); return this; } /** * <code>optional uint32 caching = 17;</code> */ public Builder clearCaching() { bitField0_ = (bitField0_ & ~0x00010000); caching_ 
= 0; onChanged(); return this; } private boolean allowPartialResults_ ; /** * <code>optional bool allow_partial_results = 18;</code> */ public boolean hasAllowPartialResults() { return ((bitField0_ & 0x00020000) == 0x00020000); } /** * <code>optional bool allow_partial_results = 18;</code> */ public boolean getAllowPartialResults() { return allowPartialResults_; } /** * <code>optional bool allow_partial_results = 18;</code> */ public Builder setAllowPartialResults(boolean value) { bitField0_ |= 0x00020000; allowPartialResults_ = value; onChanged(); return this; } /** * <code>optional bool allow_partial_results = 18;</code> */ public Builder clearAllowPartialResults() { bitField0_ = (bitField0_ & ~0x00020000); allowPartialResults_ = false; onChanged(); return this; } private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> cfTimeRange_ = java.util.Collections.emptyList(); private void ensureCfTimeRangeIsMutable() { if (!((bitField0_ & 0x00040000) == 0x00040000)) { cfTimeRange_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange>(cfTimeRange_); bitField0_ |= 0x00040000; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder> cfTimeRangeBuilder_; /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> getCfTimeRangeList() { if (cfTimeRangeBuilder_ == null) { return java.util.Collections.unmodifiableList(cfTimeRange_); } else { return cfTimeRangeBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange 
cf_time_range = 19;</code> */ public int getCfTimeRangeCount() { if (cfTimeRangeBuilder_ == null) { return cfTimeRange_.size(); } else { return cfTimeRangeBuilder_.getCount(); } } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange getCfTimeRange(int index) { if (cfTimeRangeBuilder_ == null) { return cfTimeRange_.get(index); } else { return cfTimeRangeBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code> */ public Builder setCfTimeRange( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange value) { if (cfTimeRangeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCfTimeRangeIsMutable(); cfTimeRange_.set(index, value); onChanged(); } else { cfTimeRangeBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code> */ public Builder setCfTimeRange( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder builderForValue) { if (cfTimeRangeBuilder_ == null) { ensureCfTimeRangeIsMutable(); cfTimeRange_.set(index, builderForValue.build()); onChanged(); } else { cfTimeRangeBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code> */ public Builder addCfTimeRange(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange value) { if (cfTimeRangeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCfTimeRangeIsMutable(); cfTimeRange_.add(value); onChanged(); } else { cfTimeRangeBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code> */ public Builder addCfTimeRange( int index, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange value) { if (cfTimeRangeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCfTimeRangeIsMutable(); cfTimeRange_.add(index, value); onChanged(); } else { cfTimeRangeBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code> */ public Builder addCfTimeRange( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder builderForValue) { if (cfTimeRangeBuilder_ == null) { ensureCfTimeRangeIsMutable(); cfTimeRange_.add(builderForValue.build()); onChanged(); } else { cfTimeRangeBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code> */ public Builder addCfTimeRange( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder builderForValue) { if (cfTimeRangeBuilder_ == null) { ensureCfTimeRangeIsMutable(); cfTimeRange_.add(index, builderForValue.build()); onChanged(); } else { cfTimeRangeBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code> */ public Builder addAllCfTimeRange( java.lang.Iterable<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> values) { if (cfTimeRangeBuilder_ == null) { ensureCfTimeRangeIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, cfTimeRange_); onChanged(); } else { cfTimeRangeBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code> */ public Builder clearCfTimeRange() { if (cfTimeRangeBuilder_ == null) { cfTimeRange_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00040000); onChanged(); } else { cfTimeRangeBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code> */ public Builder removeCfTimeRange(int index) { if (cfTimeRangeBuilder_ == null) { ensureCfTimeRangeIsMutable(); cfTimeRange_.remove(index); onChanged(); } else { cfTimeRangeBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder getCfTimeRangeBuilder( int index) { return getCfTimeRangeFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder getCfTimeRangeOrBuilder( int index) { if (cfTimeRangeBuilder_ == null) { return cfTimeRange_.get(index); } else { return cfTimeRangeBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder> getCfTimeRangeOrBuilderList() { if (cfTimeRangeBuilder_ != null) { return cfTimeRangeBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(cfTimeRange_); } } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder addCfTimeRangeBuilder() { return getCfTimeRangeFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.getDefaultInstance()); } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder addCfTimeRangeBuilder( int index) { return getCfTimeRangeFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.getDefaultInstance()); } /** * <code>repeated .hbase.pb.ColumnFamilyTimeRange cf_time_range = 19;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder> getCfTimeRangeBuilderList() { return getCfTimeRangeFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder> getCfTimeRangeFieldBuilder() { if (cfTimeRangeBuilder_ == null) { cfTimeRangeBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder>( cfTimeRange_, ((bitField0_ & 0x00040000) == 0x00040000), getParentForChildren(), isClean()); cfTimeRange_ = null; } return cfTimeRangeBuilder_; } private long mvccReadPoint_ ; /** * <code>optional uint64 mvcc_read_point = 20 [default = 0];</code> */ public boolean hasMvccReadPoint() { return ((bitField0_ & 0x00080000) == 0x00080000); } /** * <code>optional uint64 mvcc_read_point = 20 [default = 0];</code> */ public long getMvccReadPoint() { return mvccReadPoint_; } /** * <code>optional uint64 mvcc_read_point = 20 [default = 0];</code> */ public Builder setMvccReadPoint(long value) { bitField0_ |= 0x00080000; mvccReadPoint_ = value; onChanged(); return this; } /** * <code>optional uint64 mvcc_read_point = 20 [default = 0];</code> */ public Builder clearMvccReadPoint() { bitField0_ = (bitField0_ & ~0x00080000); mvccReadPoint_ = 0L; onChanged(); return this; } private boolean includeStartRow_ = true; /** * <code>optional bool include_start_row = 21 [default = true];</code> */ public boolean hasIncludeStartRow() { return ((bitField0_ & 0x00100000) == 0x00100000); } /** * <code>optional bool include_start_row = 21 [default = true];</code> */ public boolean getIncludeStartRow() { return includeStartRow_; } /** * <code>optional bool include_start_row = 21 [default = true];</code> */ public Builder setIncludeStartRow(boolean value) { bitField0_ |= 0x00100000; includeStartRow_ = value; onChanged(); return this; } /** * <code>optional bool include_start_row = 21 [default = true];</code> */ public Builder clearIncludeStartRow() { bitField0_ = (bitField0_ & ~0x00100000); includeStartRow_ = true; onChanged(); return this; } private boolean includeStopRow_ ; /** * <code>optional bool include_stop_row = 22 [default = false];</code> */ public boolean hasIncludeStopRow() { return ((bitField0_ & 
0x00200000) == 0x00200000); } /** * <code>optional bool include_stop_row = 22 [default = false];</code> */ public boolean getIncludeStopRow() { return includeStopRow_; } /** * <code>optional bool include_stop_row = 22 [default = false];</code> */ public Builder setIncludeStopRow(boolean value) { bitField0_ |= 0x00200000; includeStopRow_ = value; onChanged(); return this; } /** * <code>optional bool include_stop_row = 22 [default = false];</code> */ public Builder clearIncludeStopRow() { bitField0_ = (bitField0_ & ~0x00200000); includeStopRow_ = false; onChanged(); return this; } private int readType_ = 0; /** * <code>optional .hbase.pb.Scan.ReadType readType = 23 [default = DEFAULT];</code> */ public boolean hasReadType() { return ((bitField0_ & 0x00400000) == 0x00400000); } /** * <code>optional .hbase.pb.Scan.ReadType readType = 23 [default = DEFAULT];</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.ReadType getReadType() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.ReadType result = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.ReadType.valueOf(readType_); return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.ReadType.DEFAULT : result; } /** * <code>optional .hbase.pb.Scan.ReadType readType = 23 [default = DEFAULT];</code> */ public Builder setReadType(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.ReadType value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00400000; readType_ = value.getNumber(); onChanged(); return this; } /** * <code>optional .hbase.pb.Scan.ReadType readType = 23 [default = DEFAULT];</code> */ public Builder clearReadType() { bitField0_ = (bitField0_ & ~0x00400000); readType_ = 0; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.Scan) } // @@protoc_insertion_point(class_scope:hbase.pb.Scan) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Scan> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<Scan>() { public Scan parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new Scan(input, extensionRegistry); } }; 
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Scan> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Scan> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface ScanRequestOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.ScanRequest) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>optional .hbase.pb.RegionSpecifier region = 1;</code> */ boolean hasRegion(); /** * <code>optional .hbase.pb.RegionSpecifier region = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); /** * <code>optional .hbase.pb.RegionSpecifier region = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); /** * <code>optional .hbase.pb.Scan scan = 2;</code> */ boolean hasScan(); /** * <code>optional .hbase.pb.Scan scan = 2;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan getScan(); /** * <code>optional .hbase.pb.Scan scan = 2;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder(); /** * <code>optional uint64 scanner_id = 3;</code> */ boolean hasScannerId(); /** * <code>optional uint64 scanner_id = 3;</code> */ long getScannerId(); /** * <code>optional uint32 number_of_rows = 4;</code> */ boolean hasNumberOfRows(); /** * <code>optional uint32 number_of_rows = 4;</code> */ int getNumberOfRows(); /** * <code>optional bool close_scanner = 5;</code> */ boolean hasCloseScanner(); /** * <code>optional bool close_scanner = 5;</code> */ boolean getCloseScanner(); /** * <code>optional uint64 next_call_seq = 6;</code> */ boolean hasNextCallSeq(); /** * <code>optional uint64 next_call_seq = 6;</code> */ long getNextCallSeq(); /** * 
<code>optional bool client_handles_partials = 7;</code> */ boolean hasClientHandlesPartials(); /** * <code>optional bool client_handles_partials = 7;</code> */ boolean getClientHandlesPartials(); /** * <code>optional bool client_handles_heartbeats = 8;</code> */ boolean hasClientHandlesHeartbeats(); /** * <code>optional bool client_handles_heartbeats = 8;</code> */ boolean getClientHandlesHeartbeats(); /** * <code>optional bool track_scan_metrics = 9;</code> */ boolean hasTrackScanMetrics(); /** * <code>optional bool track_scan_metrics = 9;</code> */ boolean getTrackScanMetrics(); /** * <code>optional bool renew = 10 [default = false];</code> */ boolean hasRenew(); /** * <code>optional bool renew = 10 [default = false];</code> */ boolean getRenew(); /** * <pre> * if we have returned limit_of_rows rows to client, then close the scanner. * </pre> * * <code>optional uint32 limit_of_rows = 11 [default = 0];</code> */ boolean hasLimitOfRows(); /** * <pre> * if we have returned limit_of_rows rows to client, then close the scanner. * </pre> * * <code>optional uint32 limit_of_rows = 11 [default = 0];</code> */ int getLimitOfRows(); } /** * <pre> ** * A scan request. Initially, it should specify a scan. Later on, you * can use the scanner id returned to fetch result batches with a different * scan request. * The scanner will remain open if there are more results, and it's not * asked to be closed explicitly. * You can fetch the results and ask the scanner to be closed to save * a trip if you are not interested in remaining results. * </pre> * * Protobuf type {@code hbase.pb.ScanRequest} */ public static final class ScanRequest extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.ScanRequest) ScanRequestOrBuilder { // Use ScanRequest.newBuilder() to construct. 
private ScanRequest(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ScanRequest() { scannerId_ = 0L; numberOfRows_ = 0; closeScanner_ = false; nextCallSeq_ = 0L; clientHandlesPartials_ = false; clientHandlesHeartbeats_ = false; trackScanMetrics_ = false; renew_ = false; limitOfRows_ = 0; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ScanRequest( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = region_.toBuilder(); } region_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(region_); region_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 18: { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.Builder subBuilder = null; if (((bitField0_ & 0x00000002) == 0x00000002)) { subBuilder = scan_.toBuilder(); } scan_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.PARSER, extensionRegistry); if 
(subBuilder != null) { subBuilder.mergeFrom(scan_); scan_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } case 24: { bitField0_ |= 0x00000004; scannerId_ = input.readUInt64(); break; } case 32: { bitField0_ |= 0x00000008; numberOfRows_ = input.readUInt32(); break; } case 40: { bitField0_ |= 0x00000010; closeScanner_ = input.readBool(); break; } case 48: { bitField0_ |= 0x00000020; nextCallSeq_ = input.readUInt64(); break; } case 56: { bitField0_ |= 0x00000040; clientHandlesPartials_ = input.readBool(); break; } case 64: { bitField0_ |= 0x00000080; clientHandlesHeartbeats_ = input.readBool(); break; } case 72: { bitField0_ |= 0x00000100; trackScanMetrics_ = input.readBool(); break; } case 80: { bitField0_ |= 0x00000200; renew_ = input.readBool(); break; } case 88: { bitField0_ |= 0x00000400; limitOfRows_ = input.readUInt32(); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_ScanRequest_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_ScanRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest.Builder.class); } private int bitField0_; public static final int 
REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_; /** * <code>optional .hbase.pb.RegionSpecifier region = 1;</code> */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional .hbase.pb.RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } /** * <code>optional .hbase.pb.RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } public static final int SCAN_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan scan_; /** * <code>optional .hbase.pb.Scan scan = 2;</code> */ public boolean hasScan() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional .hbase.pb.Scan scan = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan getScan() { return scan_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.getDefaultInstance() : scan_; } /** * <code>optional .hbase.pb.Scan scan = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() { return scan_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.getDefaultInstance() : scan_; } public static final int SCANNER_ID_FIELD_NUMBER = 3; private long scannerId_; /** * <code>optional uint64 scanner_id = 3;</code> */ public boolean hasScannerId() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional uint64 scanner_id = 3;</code> */ public long getScannerId() { return scannerId_; } public static final int NUMBER_OF_ROWS_FIELD_NUMBER = 4; private int numberOfRows_; /** * <code>optional uint32 number_of_rows = 4;</code> */ public boolean hasNumberOfRows() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional uint32 number_of_rows = 4;</code> */ public int getNumberOfRows() { return numberOfRows_; } public static final int CLOSE_SCANNER_FIELD_NUMBER = 5; private boolean closeScanner_; /** * <code>optional bool close_scanner = 5;</code> */ public boolean hasCloseScanner() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>optional bool close_scanner = 5;</code> */ public boolean getCloseScanner() { return closeScanner_; } public static final int NEXT_CALL_SEQ_FIELD_NUMBER = 6; private long nextCallSeq_; /** * <code>optional uint64 next_call_seq = 6;</code> */ public boolean hasNextCallSeq() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <code>optional uint64 next_call_seq = 6;</code> */ public long getNextCallSeq() { return nextCallSeq_; } public static final int CLIENT_HANDLES_PARTIALS_FIELD_NUMBER = 7; private boolean clientHandlesPartials_; /** * <code>optional bool client_handles_partials = 7;</code> */ public boolean hasClientHandlesPartials() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** * <code>optional bool client_handles_partials = 7;</code> */ public boolean getClientHandlesPartials() { return clientHandlesPartials_; } public static final int CLIENT_HANDLES_HEARTBEATS_FIELD_NUMBER = 8; private boolean clientHandlesHeartbeats_; /** * <code>optional bool 
client_handles_heartbeats = 8;</code> */ public boolean hasClientHandlesHeartbeats() { return ((bitField0_ & 0x00000080) == 0x00000080); } /** * <code>optional bool client_handles_heartbeats = 8;</code> */ public boolean getClientHandlesHeartbeats() { return clientHandlesHeartbeats_; } public static final int TRACK_SCAN_METRICS_FIELD_NUMBER = 9; private boolean trackScanMetrics_; /** * <code>optional bool track_scan_metrics = 9;</code> */ public boolean hasTrackScanMetrics() { return ((bitField0_ & 0x00000100) == 0x00000100); } /** * <code>optional bool track_scan_metrics = 9;</code> */ public boolean getTrackScanMetrics() { return trackScanMetrics_; } public static final int RENEW_FIELD_NUMBER = 10; private boolean renew_; /** * <code>optional bool renew = 10 [default = false];</code> */ public boolean hasRenew() { return ((bitField0_ & 0x00000200) == 0x00000200); } /** * <code>optional bool renew = 10 [default = false];</code> */ public boolean getRenew() { return renew_; } public static final int LIMIT_OF_ROWS_FIELD_NUMBER = 11; private int limitOfRows_; /** * <pre> * if we have returned limit_of_rows rows to client, then close the scanner. * </pre> * * <code>optional uint32 limit_of_rows = 11 [default = 0];</code> */ public boolean hasLimitOfRows() { return ((bitField0_ & 0x00000400) == 0x00000400); } /** * <pre> * if we have returned limit_of_rows rows to client, then close the scanner. 
* </pre> * * <code>optional uint32 limit_of_rows = 11 [default = 0];</code> */ public int getLimitOfRows() { return limitOfRows_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (hasRegion()) { if (!getRegion().isInitialized()) { memoizedIsInitialized = 0; return false; } } if (hasScan()) { if (!getScan().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, getRegion()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeMessage(2, getScan()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeUInt64(3, scannerId_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeUInt32(4, numberOfRows_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeBool(5, closeScanner_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { output.writeUInt64(6, nextCallSeq_); } if (((bitField0_ & 0x00000040) == 0x00000040)) { output.writeBool(7, clientHandlesPartials_); } if (((bitField0_ & 0x00000080) == 0x00000080)) { output.writeBool(8, clientHandlesHeartbeats_); } if (((bitField0_ & 0x00000100) == 0x00000100)) { output.writeBool(9, trackScanMetrics_); } if (((bitField0_ & 0x00000200) == 0x00000200)) { output.writeBool(10, renew_); } if (((bitField0_ & 0x00000400) == 0x00000400)) { output.writeUInt32(11, limitOfRows_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(1, getRegion()); } if (((bitField0_ & 0x00000002) 
== 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(2, getScan()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt64Size(3, scannerId_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt32Size(4, numberOfRows_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(5, closeScanner_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt64Size(6, nextCallSeq_); } if (((bitField0_ & 0x00000040) == 0x00000040)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(7, clientHandlesPartials_); } if (((bitField0_ & 0x00000080) == 0x00000080)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(8, clientHandlesHeartbeats_); } if (((bitField0_ & 0x00000100) == 0x00000100)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(9, trackScanMetrics_); } if (((bitField0_ & 0x00000200) == 0x00000200)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(10, renew_); } if (((bitField0_ & 0x00000400) == 0x00000400)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt32Size(11, limitOfRows_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest)) { return super.equals(obj); } 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest) obj; boolean result = true; result = result && (hasRegion() == other.hasRegion()); if (hasRegion()) { result = result && getRegion() .equals(other.getRegion()); } result = result && (hasScan() == other.hasScan()); if (hasScan()) { result = result && getScan() .equals(other.getScan()); } result = result && (hasScannerId() == other.hasScannerId()); if (hasScannerId()) { result = result && (getScannerId() == other.getScannerId()); } result = result && (hasNumberOfRows() == other.hasNumberOfRows()); if (hasNumberOfRows()) { result = result && (getNumberOfRows() == other.getNumberOfRows()); } result = result && (hasCloseScanner() == other.hasCloseScanner()); if (hasCloseScanner()) { result = result && (getCloseScanner() == other.getCloseScanner()); } result = result && (hasNextCallSeq() == other.hasNextCallSeq()); if (hasNextCallSeq()) { result = result && (getNextCallSeq() == other.getNextCallSeq()); } result = result && (hasClientHandlesPartials() == other.hasClientHandlesPartials()); if (hasClientHandlesPartials()) { result = result && (getClientHandlesPartials() == other.getClientHandlesPartials()); } result = result && (hasClientHandlesHeartbeats() == other.hasClientHandlesHeartbeats()); if (hasClientHandlesHeartbeats()) { result = result && (getClientHandlesHeartbeats() == other.getClientHandlesHeartbeats()); } result = result && (hasTrackScanMetrics() == other.hasTrackScanMetrics()); if (hasTrackScanMetrics()) { result = result && (getTrackScanMetrics() == other.getTrackScanMetrics()); } result = result && (hasRenew() == other.hasRenew()); if (hasRenew()) { result = result && (getRenew() == other.getRenew()); } result = result && (hasLimitOfRows() == other.hasLimitOfRows()); if (hasLimitOfRows()) { result = result && (getLimitOfRows() == other.getLimitOfRows()); } result = result && 
unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasRegion()) { hash = (37 * hash) + REGION_FIELD_NUMBER; hash = (53 * hash) + getRegion().hashCode(); } if (hasScan()) { hash = (37 * hash) + SCAN_FIELD_NUMBER; hash = (53 * hash) + getScan().hashCode(); } if (hasScannerId()) { hash = (37 * hash) + SCANNER_ID_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( getScannerId()); } if (hasNumberOfRows()) { hash = (37 * hash) + NUMBER_OF_ROWS_FIELD_NUMBER; hash = (53 * hash) + getNumberOfRows(); } if (hasCloseScanner()) { hash = (37 * hash) + CLOSE_SCANNER_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getCloseScanner()); } if (hasNextCallSeq()) { hash = (37 * hash) + NEXT_CALL_SEQ_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( getNextCallSeq()); } if (hasClientHandlesPartials()) { hash = (37 * hash) + CLIENT_HANDLES_PARTIALS_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getClientHandlesPartials()); } if (hasClientHandlesHeartbeats()) { hash = (37 * hash) + CLIENT_HANDLES_HEARTBEATS_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getClientHandlesHeartbeats()); } if (hasTrackScanMetrics()) { hash = (37 * hash) + TRACK_SCAN_METRICS_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getTrackScanMetrics()); } if (hasRenew()) { hash = (37 * hash) + RENEW_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getRenew()); } if (hasLimitOfRows()) { hash = (37 * hash) + LIMIT_OF_ROWS_FIELD_NUMBER; hash = 
(53 * hash) + getLimitOfRows(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 
.parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> ** * A scan request. Initially, it should specify a scan. Later on, you * can use the scanner id returned to fetch result batches with a different * scan request. * The scanner will remain open if there are more results, and it's not * asked to be closed explicitly. * You can fetch the results and ask the scanner to be closed to save * a trip if you are not interested in remaining results. * </pre> * * Protobuf type {@code hbase.pb.ScanRequest} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.ScanRequest) org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequestOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_ScanRequest_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_ScanRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { 
super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getRegionFieldBuilder(); getScanFieldBuilder(); } } public Builder clear() { super.clear(); if (regionBuilder_ == null) { region_ = null; } else { regionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (scanBuilder_ == null) { scan_ = null; } else { scanBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); scannerId_ = 0L; bitField0_ = (bitField0_ & ~0x00000004); numberOfRows_ = 0; bitField0_ = (bitField0_ & ~0x00000008); closeScanner_ = false; bitField0_ = (bitField0_ & ~0x00000010); nextCallSeq_ = 0L; bitField0_ = (bitField0_ & ~0x00000020); clientHandlesPartials_ = false; bitField0_ = (bitField0_ & ~0x00000040); clientHandlesHeartbeats_ = false; bitField0_ = (bitField0_ & ~0x00000080); trackScanMetrics_ = false; bitField0_ = (bitField0_ & ~0x00000100); renew_ = false; bitField0_ = (bitField0_ & ~0x00000200); limitOfRows_ = 0; bitField0_ = (bitField0_ & ~0x00000400); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_ScanRequest_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest buildPartial() { 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (regionBuilder_ == null) { result.region_ = region_; } else { result.region_ = regionBuilder_.build(); } if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } if (scanBuilder_ == null) { result.scan_ = scan_; } else { result.scan_ = scanBuilder_.build(); } if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.scannerId_ = scannerId_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; } result.numberOfRows_ = numberOfRows_; if (((from_bitField0_ & 0x00000010) == 0x00000010)) { to_bitField0_ |= 0x00000010; } result.closeScanner_ = closeScanner_; if (((from_bitField0_ & 0x00000020) == 0x00000020)) { to_bitField0_ |= 0x00000020; } result.nextCallSeq_ = nextCallSeq_; if (((from_bitField0_ & 0x00000040) == 0x00000040)) { to_bitField0_ |= 0x00000040; } result.clientHandlesPartials_ = clientHandlesPartials_; if (((from_bitField0_ & 0x00000080) == 0x00000080)) { to_bitField0_ |= 0x00000080; } result.clientHandlesHeartbeats_ = clientHandlesHeartbeats_; if (((from_bitField0_ & 0x00000100) == 0x00000100)) { to_bitField0_ |= 0x00000100; } result.trackScanMetrics_ = trackScanMetrics_; if (((from_bitField0_ & 0x00000200) == 0x00000200)) { to_bitField0_ |= 0x00000200; } result.renew_ = renew_; if (((from_bitField0_ & 0x00000400) == 0x00000400)) { to_bitField0_ |= 0x00000400; } result.limitOfRows_ = limitOfRows_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) 
super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance()) return this; if (other.hasRegion()) { mergeRegion(other.getRegion()); } if (other.hasScan()) { mergeScan(other.getScan()); } if (other.hasScannerId()) { setScannerId(other.getScannerId()); } if (other.hasNumberOfRows()) { setNumberOfRows(other.getNumberOfRows()); } if (other.hasCloseScanner()) { setCloseScanner(other.getCloseScanner()); } if (other.hasNextCallSeq()) { setNextCallSeq(other.getNextCallSeq()); } if (other.hasClientHandlesPartials()) { setClientHandlesPartials(other.getClientHandlesPartials()); } if (other.hasClientHandlesHeartbeats()) { setClientHandlesHeartbeats(other.getClientHandlesHeartbeats()); } if (other.hasTrackScanMetrics()) { 
setTrackScanMetrics(other.getTrackScanMetrics()); } if (other.hasRenew()) { setRenew(other.getRenew()); } if (other.hasLimitOfRows()) { setLimitOfRows(other.getLimitOfRows()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (hasRegion()) { if (!getRegion().isInitialized()) { return false; } } if (hasScan()) { if (!getScan().isInitialized()) { return false; } } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; /** * <code>optional .hbase.pb.RegionSpecifier region = 1;</code> */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional .hbase.pb.RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { 
return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } else { return regionBuilder_.getMessage(); } } /** * <code>optional .hbase.pb.RegionSpecifier region = 1;</code> */ public Builder setRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } region_ = value; onChanged(); } else { regionBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>optional .hbase.pb.RegionSpecifier region = 1;</code> */ public Builder setRegion( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { if (regionBuilder_ == null) { region_ = builderForValue.build(); onChanged(); } else { regionBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>optional .hbase.pb.RegionSpecifier region = 1;</code> */ public Builder mergeRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && region_ != null && region_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); } else { region_ = value; } onChanged(); } else { regionBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>optional .hbase.pb.RegionSpecifier region = 1;</code> */ public Builder clearRegion() { if (regionBuilder_ == null) { region_ = null; onChanged(); } else { regionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>optional .hbase.pb.RegionSpecifier region = 1;</code> */ public 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { bitField0_ |= 0x00000001; onChanged(); return getRegionFieldBuilder().getBuilder(); } /** * <code>optional .hbase.pb.RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); } else { return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } } /** * <code>optional .hbase.pb.RegionSpecifier region = 1;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { if (regionBuilder_ == null) { regionBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( getRegion(), getParentForChildren(), isClean()); region_ = null; } return regionBuilder_; } private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan scan_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanOrBuilder> scanBuilder_; /** * <code>optional .hbase.pb.Scan scan = 2;</code> */ public boolean hasScan() { 
return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional .hbase.pb.Scan scan = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan getScan() { if (scanBuilder_ == null) { return scan_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.getDefaultInstance() : scan_; } else { return scanBuilder_.getMessage(); } } /** * <code>optional .hbase.pb.Scan scan = 2;</code> */ public Builder setScan(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan value) { if (scanBuilder_ == null) { if (value == null) { throw new NullPointerException(); } scan_ = value; onChanged(); } else { scanBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .hbase.pb.Scan scan = 2;</code> */ public Builder setScan( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.Builder builderForValue) { if (scanBuilder_ == null) { scan_ = builderForValue.build(); onChanged(); } else { scanBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .hbase.pb.Scan scan = 2;</code> */ public Builder mergeScan(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan value) { if (scanBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && scan_ != null && scan_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.getDefaultInstance()) { scan_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.newBuilder(scan_).mergeFrom(value).buildPartial(); } else { scan_ = value; } onChanged(); } else { scanBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .hbase.pb.Scan scan = 2;</code> */ public Builder clearScan() { if (scanBuilder_ == null) { scan_ = null; onChanged(); } else { scanBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } /** * <code>optional .hbase.pb.Scan scan = 2;</code> */ public 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.Builder getScanBuilder() { bitField0_ |= 0x00000002; onChanged(); return getScanFieldBuilder().getBuilder(); } /** * <code>optional .hbase.pb.Scan scan = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() { if (scanBuilder_ != null) { return scanBuilder_.getMessageOrBuilder(); } else { return scan_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.getDefaultInstance() : scan_; } } /** * <code>optional .hbase.pb.Scan scan = 2;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanOrBuilder> getScanFieldBuilder() { if (scanBuilder_ == null) { scanBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanOrBuilder>( getScan(), getParentForChildren(), isClean()); scan_ = null; } return scanBuilder_; } private long scannerId_ ; /** * <code>optional uint64 scanner_id = 3;</code> */ public boolean hasScannerId() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional uint64 scanner_id = 3;</code> */ public long getScannerId() { return scannerId_; } /** * <code>optional uint64 scanner_id = 3;</code> */ public Builder setScannerId(long value) { bitField0_ |= 0x00000004; scannerId_ = value; onChanged(); return this; } /** * <code>optional uint64 scanner_id = 3;</code> */ public Builder clearScannerId() { bitField0_ = (bitField0_ & ~0x00000004); scannerId_ = 0L; onChanged(); return this; } private int numberOfRows_ 
; /** * <code>optional uint32 number_of_rows = 4;</code> */ public boolean hasNumberOfRows() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional uint32 number_of_rows = 4;</code> */ public int getNumberOfRows() { return numberOfRows_; } /** * <code>optional uint32 number_of_rows = 4;</code> */ public Builder setNumberOfRows(int value) { bitField0_ |= 0x00000008; numberOfRows_ = value; onChanged(); return this; } /** * <code>optional uint32 number_of_rows = 4;</code> */ public Builder clearNumberOfRows() { bitField0_ = (bitField0_ & ~0x00000008); numberOfRows_ = 0; onChanged(); return this; } private boolean closeScanner_ ; /** * <code>optional bool close_scanner = 5;</code> */ public boolean hasCloseScanner() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>optional bool close_scanner = 5;</code> */ public boolean getCloseScanner() { return closeScanner_; } /** * <code>optional bool close_scanner = 5;</code> */ public Builder setCloseScanner(boolean value) { bitField0_ |= 0x00000010; closeScanner_ = value; onChanged(); return this; } /** * <code>optional bool close_scanner = 5;</code> */ public Builder clearCloseScanner() { bitField0_ = (bitField0_ & ~0x00000010); closeScanner_ = false; onChanged(); return this; } private long nextCallSeq_ ; /** * <code>optional uint64 next_call_seq = 6;</code> */ public boolean hasNextCallSeq() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <code>optional uint64 next_call_seq = 6;</code> */ public long getNextCallSeq() { return nextCallSeq_; } /** * <code>optional uint64 next_call_seq = 6;</code> */ public Builder setNextCallSeq(long value) { bitField0_ |= 0x00000020; nextCallSeq_ = value; onChanged(); return this; } /** * <code>optional uint64 next_call_seq = 6;</code> */ public Builder clearNextCallSeq() { bitField0_ = (bitField0_ & ~0x00000020); nextCallSeq_ = 0L; onChanged(); return this; } private boolean clientHandlesPartials_ ; /** * <code>optional bool 
client_handles_partials = 7;</code> */ public boolean hasClientHandlesPartials() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** * <code>optional bool client_handles_partials = 7;</code> */ public boolean getClientHandlesPartials() { return clientHandlesPartials_; } /** * <code>optional bool client_handles_partials = 7;</code> */ public Builder setClientHandlesPartials(boolean value) { bitField0_ |= 0x00000040; clientHandlesPartials_ = value; onChanged(); return this; } /** * <code>optional bool client_handles_partials = 7;</code> */ public Builder clearClientHandlesPartials() { bitField0_ = (bitField0_ & ~0x00000040); clientHandlesPartials_ = false; onChanged(); return this; } private boolean clientHandlesHeartbeats_ ; /** * <code>optional bool client_handles_heartbeats = 8;</code> */ public boolean hasClientHandlesHeartbeats() { return ((bitField0_ & 0x00000080) == 0x00000080); } /** * <code>optional bool client_handles_heartbeats = 8;</code> */ public boolean getClientHandlesHeartbeats() { return clientHandlesHeartbeats_; } /** * <code>optional bool client_handles_heartbeats = 8;</code> */ public Builder setClientHandlesHeartbeats(boolean value) { bitField0_ |= 0x00000080; clientHandlesHeartbeats_ = value; onChanged(); return this; } /** * <code>optional bool client_handles_heartbeats = 8;</code> */ public Builder clearClientHandlesHeartbeats() { bitField0_ = (bitField0_ & ~0x00000080); clientHandlesHeartbeats_ = false; onChanged(); return this; } private boolean trackScanMetrics_ ; /** * <code>optional bool track_scan_metrics = 9;</code> */ public boolean hasTrackScanMetrics() { return ((bitField0_ & 0x00000100) == 0x00000100); } /** * <code>optional bool track_scan_metrics = 9;</code> */ public boolean getTrackScanMetrics() { return trackScanMetrics_; } /** * <code>optional bool track_scan_metrics = 9;</code> */ public Builder setTrackScanMetrics(boolean value) { bitField0_ |= 0x00000100; trackScanMetrics_ = value; onChanged(); return this; } /** * 
<code>optional bool track_scan_metrics = 9;</code> */ public Builder clearTrackScanMetrics() { bitField0_ = (bitField0_ & ~0x00000100); trackScanMetrics_ = false; onChanged(); return this; } private boolean renew_ ; /** * <code>optional bool renew = 10 [default = false];</code> */ public boolean hasRenew() { return ((bitField0_ & 0x00000200) == 0x00000200); } /** * <code>optional bool renew = 10 [default = false];</code> */ public boolean getRenew() { return renew_; } /** * <code>optional bool renew = 10 [default = false];</code> */ public Builder setRenew(boolean value) { bitField0_ |= 0x00000200; renew_ = value; onChanged(); return this; } /** * <code>optional bool renew = 10 [default = false];</code> */ public Builder clearRenew() { bitField0_ = (bitField0_ & ~0x00000200); renew_ = false; onChanged(); return this; } private int limitOfRows_ ; /** * <pre> * if we have returned limit_of_rows rows to client, then close the scanner. * </pre> * * <code>optional uint32 limit_of_rows = 11 [default = 0];</code> */ public boolean hasLimitOfRows() { return ((bitField0_ & 0x00000400) == 0x00000400); } /** * <pre> * if we have returned limit_of_rows rows to client, then close the scanner. * </pre> * * <code>optional uint32 limit_of_rows = 11 [default = 0];</code> */ public int getLimitOfRows() { return limitOfRows_; } /** * <pre> * if we have returned limit_of_rows rows to client, then close the scanner. * </pre> * * <code>optional uint32 limit_of_rows = 11 [default = 0];</code> */ public Builder setLimitOfRows(int value) { bitField0_ |= 0x00000400; limitOfRows_ = value; onChanged(); return this; } /** * <pre> * if we have returned limit_of_rows rows to client, then close the scanner. 
* </pre> * * <code>optional uint32 limit_of_rows = 11 [default = 0];</code> */ public Builder clearLimitOfRows() { bitField0_ = (bitField0_ & ~0x00000400); limitOfRows_ = 0; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.ScanRequest) } // @@protoc_insertion_point(class_scope:hbase.pb.ScanRequest) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ScanRequest> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<ScanRequest>() { public ScanRequest parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new ScanRequest(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ScanRequest> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ScanRequest> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } 
} public interface ScanResponseOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.ScanResponse) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <pre> * This field is filled in if we are doing cellblocks. A cellblock is made up * of all Cells serialized out as one cellblock BUT responses from a server * have their Cells grouped by Result. So we can reconstitute the * Results on the client-side, this field is a list of counts of Cells * in each Result that makes up the response. For example, if this field * has 3, 3, 3 in it, then we know that on the client, we are to make * three Results each of three Cells each. * </pre> * * <code>repeated uint32 cells_per_result = 1;</code> */ java.util.List<java.lang.Integer> getCellsPerResultList(); /** * <pre> * This field is filled in if we are doing cellblocks. A cellblock is made up * of all Cells serialized out as one cellblock BUT responses from a server * have their Cells grouped by Result. So we can reconstitute the * Results on the client-side, this field is a list of counts of Cells * in each Result that makes up the response. For example, if this field * has 3, 3, 3 in it, then we know that on the client, we are to make * three Results each of three Cells each. * </pre> * * <code>repeated uint32 cells_per_result = 1;</code> */ int getCellsPerResultCount(); /** * <pre> * This field is filled in if we are doing cellblocks. A cellblock is made up * of all Cells serialized out as one cellblock BUT responses from a server * have their Cells grouped by Result. So we can reconstitute the * Results on the client-side, this field is a list of counts of Cells * in each Result that makes up the response. For example, if this field * has 3, 3, 3 in it, then we know that on the client, we are to make * three Results each of three Cells each. 
* </pre> * * <code>repeated uint32 cells_per_result = 1;</code> */ int getCellsPerResult(int index); /** * <code>optional uint64 scanner_id = 2;</code> */ boolean hasScannerId(); /** * <code>optional uint64 scanner_id = 2;</code> */ long getScannerId(); /** * <code>optional bool more_results = 3;</code> */ boolean hasMoreResults(); /** * <code>optional bool more_results = 3;</code> */ boolean getMoreResults(); /** * <code>optional uint32 ttl = 4;</code> */ boolean hasTtl(); /** * <code>optional uint32 ttl = 4;</code> */ int getTtl(); /** * <pre> * If cells are not carried in an accompanying cellblock, then they are pb'd here. * This field is mutually exclusive with cells_per_result (since the Cells will * be inside the pb'd Result) * </pre> * * <code>repeated .hbase.pb.Result results = 5;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result> getResultsList(); /** * <pre> * If cells are not carried in an accompanying cellblock, then they are pb'd here. * This field is mutually exclusive with cells_per_result (since the Cells will * be inside the pb'd Result) * </pre> * * <code>repeated .hbase.pb.Result results = 5;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result getResults(int index); /** * <pre> * If cells are not carried in an accompanying cellblock, then they are pb'd here. * This field is mutually exclusive with cells_per_result (since the Cells will * be inside the pb'd Result) * </pre> * * <code>repeated .hbase.pb.Result results = 5;</code> */ int getResultsCount(); /** * <pre> * If cells are not carried in an accompanying cellblock, then they are pb'd here. * This field is mutually exclusive with cells_per_result (since the Cells will * be inside the pb'd Result) * </pre> * * <code>repeated .hbase.pb.Result results = 5;</code> */ java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder> getResultsOrBuilderList(); /** * <pre> * If cells are not carried in an accompanying cellblock, then they are pb'd here. * This field is mutually exclusive with cells_per_result (since the Cells will * be inside the pb'd Result) * </pre> * * <code>repeated .hbase.pb.Result results = 5;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder getResultsOrBuilder( int index); /** * <code>optional bool stale = 6;</code> */ boolean hasStale(); /** * <code>optional bool stale = 6;</code> */ boolean getStale(); /** * <pre> * This field is filled in if we are doing cellblocks. In the event that a row * could not fit all of its cells into a single RPC chunk, the results will be * returned as partials, and reconstructed into a complete result on the client * side. This field is a list of flags indicating whether or not the result * that the cells belong to is a partial result. For example, if this field * has false, false, true in it, then we know that on the client side, we need to * make another RPC request since the last result was only a partial. * </pre> * * <code>repeated bool partial_flag_per_result = 7;</code> */ java.util.List<java.lang.Boolean> getPartialFlagPerResultList(); /** * <pre> * This field is filled in if we are doing cellblocks. In the event that a row * could not fit all of its cells into a single RPC chunk, the results will be * returned as partials, and reconstructed into a complete result on the client * side. This field is a list of flags indicating whether or not the result * that the cells belong to is a partial result. For example, if this field * has false, false, true in it, then we know that on the client side, we need to * make another RPC request since the last result was only a partial. 
* </pre> * * <code>repeated bool partial_flag_per_result = 7;</code> */ int getPartialFlagPerResultCount(); /** * <pre> * This field is filled in if we are doing cellblocks. In the event that a row * could not fit all of its cells into a single RPC chunk, the results will be * returned as partials, and reconstructed into a complete result on the client * side. This field is a list of flags indicating whether or not the result * that the cells belong to is a partial result. For example, if this field * has false, false, true in it, then we know that on the client side, we need to * make another RPC request since the last result was only a partial. * </pre> * * <code>repeated bool partial_flag_per_result = 7;</code> */ boolean getPartialFlagPerResult(int index); /** * <pre> * A server may choose to limit the number of results returned to the client for * reasons such as the size in bytes or quantity of results accumulated. This field * will true when more results exist in the current region. * </pre> * * <code>optional bool more_results_in_region = 8;</code> */ boolean hasMoreResultsInRegion(); /** * <pre> * A server may choose to limit the number of results returned to the client for * reasons such as the size in bytes or quantity of results accumulated. This field * will true when more results exist in the current region. * </pre> * * <code>optional bool more_results_in_region = 8;</code> */ boolean getMoreResultsInRegion(); /** * <pre> * This field is filled in if the server is sending back a heartbeat message. * Heartbeat messages are sent back to the client to prevent the scanner from * timing out. Seeing a heartbeat message communicates to the Client that the * server would have continued to scan had the time limit not been reached. * </pre> * * <code>optional bool heartbeat_message = 9;</code> */ boolean hasHeartbeatMessage(); /** * <pre> * This field is filled in if the server is sending back a heartbeat message. 
* Heartbeat messages are sent back to the client to prevent the scanner from * timing out. Seeing a heartbeat message communicates to the Client that the * server would have continued to scan had the time limit not been reached. * </pre> * * <code>optional bool heartbeat_message = 9;</code> */ boolean getHeartbeatMessage(); /** * <pre> * This field is filled in if the client has requested that scan metrics be tracked. * The metrics tracked here are sent back to the client to be tracked together with * the existing client side metrics. * </pre> * * <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code> */ boolean hasScanMetrics(); /** * <pre> * This field is filled in if the client has requested that scan metrics be tracked. * The metrics tracked here are sent back to the client to be tracked together with * the existing client side metrics. * </pre> * * <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics getScanMetrics(); /** * <pre> * This field is filled in if the client has requested that scan metrics be tracked. * The metrics tracked here are sent back to the client to be tracked together with * the existing client side metrics. * </pre> * * <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetricsOrBuilder getScanMetricsOrBuilder(); /** * <pre> * The mvcc read point which is used to open the scanner at server side. Client can * make use of this mvcc_read_point when restarting a scanner to get a consistent view * of a row. * </pre> * * <code>optional uint64 mvcc_read_point = 11 [default = 0];</code> */ boolean hasMvccReadPoint(); /** * <pre> * The mvcc read point which is used to open the scanner at server side. Client can * make use of this mvcc_read_point when restarting a scanner to get a consistent view * of a row. 
* </pre> * * <code>optional uint64 mvcc_read_point = 11 [default = 0];</code> */ long getMvccReadPoint(); } /** * <pre> ** * The scan response. If there are no more results, more_results will * be false. If it is not specified, it means there are more. * </pre> * * Protobuf type {@code hbase.pb.ScanResponse} */ public static final class ScanResponse extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.ScanResponse) ScanResponseOrBuilder { // Use ScanResponse.newBuilder() to construct. private ScanResponse(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ScanResponse() { cellsPerResult_ = java.util.Collections.emptyList(); scannerId_ = 0L; moreResults_ = false; ttl_ = 0; results_ = java.util.Collections.emptyList(); stale_ = false; partialFlagPerResult_ = java.util.Collections.emptyList(); moreResultsInRegion_ = false; heartbeatMessage_ = false; mvccReadPoint_ = 0L; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ScanResponse( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { cellsPerResult_ = new 
java.util.ArrayList<java.lang.Integer>(); mutable_bitField0_ |= 0x00000001; } cellsPerResult_.add(input.readUInt32()); break; } case 10: { int length = input.readRawVarint32(); int limit = input.pushLimit(length); if (!((mutable_bitField0_ & 0x00000001) == 0x00000001) && input.getBytesUntilLimit() > 0) { cellsPerResult_ = new java.util.ArrayList<java.lang.Integer>(); mutable_bitField0_ |= 0x00000001; } while (input.getBytesUntilLimit() > 0) { cellsPerResult_.add(input.readUInt32()); } input.popLimit(limit); break; } case 16: { bitField0_ |= 0x00000001; scannerId_ = input.readUInt64(); break; } case 24: { bitField0_ |= 0x00000002; moreResults_ = input.readBool(); break; } case 32: { bitField0_ |= 0x00000004; ttl_ = input.readUInt32(); break; } case 42: { if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) { results_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result>(); mutable_bitField0_ |= 0x00000010; } results_.add( input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.PARSER, extensionRegistry)); break; } case 48: { bitField0_ |= 0x00000008; stale_ = input.readBool(); break; } case 56: { if (!((mutable_bitField0_ & 0x00000040) == 0x00000040)) { partialFlagPerResult_ = new java.util.ArrayList<java.lang.Boolean>(); mutable_bitField0_ |= 0x00000040; } partialFlagPerResult_.add(input.readBool()); break; } case 58: { int length = input.readRawVarint32(); int limit = input.pushLimit(length); if (!((mutable_bitField0_ & 0x00000040) == 0x00000040) && input.getBytesUntilLimit() > 0) { partialFlagPerResult_ = new java.util.ArrayList<java.lang.Boolean>(); mutable_bitField0_ |= 0x00000040; } while (input.getBytesUntilLimit() > 0) { partialFlagPerResult_.add(input.readBool()); } input.popLimit(limit); break; } case 64: { bitField0_ |= 0x00000010; moreResultsInRegion_ = input.readBool(); break; } case 72: { bitField0_ |= 0x00000020; heartbeatMessage_ = input.readBool(); break; } case 82: { 
org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics.Builder subBuilder = null; if (((bitField0_ & 0x00000040) == 0x00000040)) { subBuilder = scanMetrics_.toBuilder(); } scanMetrics_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(scanMetrics_); scanMetrics_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000040; break; } case 88: { bitField0_ |= 0x00000080; mvccReadPoint_ = input.readUInt64(); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { cellsPerResult_ = java.util.Collections.unmodifiableList(cellsPerResult_); } if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) { results_ = java.util.Collections.unmodifiableList(results_); } if (((mutable_bitField0_ & 0x00000040) == 0x00000040)) { partialFlagPerResult_ = java.util.Collections.unmodifiableList(partialFlagPerResult_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_ScanResponse_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_ScanResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse.class, 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse.Builder.class); } private int bitField0_; public static final int CELLS_PER_RESULT_FIELD_NUMBER = 1; private java.util.List<java.lang.Integer> cellsPerResult_; /** * <pre> * This field is filled in if we are doing cellblocks. A cellblock is made up * of all Cells serialized out as one cellblock BUT responses from a server * have their Cells grouped by Result. So we can reconstitute the * Results on the client-side, this field is a list of counts of Cells * in each Result that makes up the response. For example, if this field * has 3, 3, 3 in it, then we know that on the client, we are to make * three Results each of three Cells each. * </pre> * * <code>repeated uint32 cells_per_result = 1;</code> */ public java.util.List<java.lang.Integer> getCellsPerResultList() { return cellsPerResult_; } /** * <pre> * This field is filled in if we are doing cellblocks. A cellblock is made up * of all Cells serialized out as one cellblock BUT responses from a server * have their Cells grouped by Result. So we can reconstitute the * Results on the client-side, this field is a list of counts of Cells * in each Result that makes up the response. For example, if this field * has 3, 3, 3 in it, then we know that on the client, we are to make * three Results each of three Cells each. * </pre> * * <code>repeated uint32 cells_per_result = 1;</code> */ public int getCellsPerResultCount() { return cellsPerResult_.size(); } /** * <pre> * This field is filled in if we are doing cellblocks. A cellblock is made up * of all Cells serialized out as one cellblock BUT responses from a server * have their Cells grouped by Result. So we can reconstitute the * Results on the client-side, this field is a list of counts of Cells * in each Result that makes up the response. For example, if this field * has 3, 3, 3 in it, then we know that on the client, we are to make * three Results each of three Cells each. 
* </pre> * * <code>repeated uint32 cells_per_result = 1;</code> */ public int getCellsPerResult(int index) { return cellsPerResult_.get(index); } public static final int SCANNER_ID_FIELD_NUMBER = 2; private long scannerId_; /** * <code>optional uint64 scanner_id = 2;</code> */ public boolean hasScannerId() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional uint64 scanner_id = 2;</code> */ public long getScannerId() { return scannerId_; } public static final int MORE_RESULTS_FIELD_NUMBER = 3; private boolean moreResults_; /** * <code>optional bool more_results = 3;</code> */ public boolean hasMoreResults() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional bool more_results = 3;</code> */ public boolean getMoreResults() { return moreResults_; } public static final int TTL_FIELD_NUMBER = 4; private int ttl_; /** * <code>optional uint32 ttl = 4;</code> */ public boolean hasTtl() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional uint32 ttl = 4;</code> */ public int getTtl() { return ttl_; } public static final int RESULTS_FIELD_NUMBER = 5; private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result> results_; /** * <pre> * If cells are not carried in an accompanying cellblock, then they are pb'd here. * This field is mutually exclusive with cells_per_result (since the Cells will * be inside the pb'd Result) * </pre> * * <code>repeated .hbase.pb.Result results = 5;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result> getResultsList() { return results_; } /** * <pre> * If cells are not carried in an accompanying cellblock, then they are pb'd here. * This field is mutually exclusive with cells_per_result (since the Cells will * be inside the pb'd Result) * </pre> * * <code>repeated .hbase.pb.Result results = 5;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder> getResultsOrBuilderList() { return results_; } /** * <pre> * If cells are not carried in an accompanying cellblock, then they are pb'd here. * This field is mutually exclusive with cells_per_result (since the Cells will * be inside the pb'd Result) * </pre> * * <code>repeated .hbase.pb.Result results = 5;</code> */ public int getResultsCount() { return results_.size(); } /** * <pre> * If cells are not carried in an accompanying cellblock, then they are pb'd here. * This field is mutually exclusive with cells_per_result (since the Cells will * be inside the pb'd Result) * </pre> * * <code>repeated .hbase.pb.Result results = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result getResults(int index) { return results_.get(index); } /** * <pre> * If cells are not carried in an accompanying cellblock, then they are pb'd here. * This field is mutually exclusive with cells_per_result (since the Cells will * be inside the pb'd Result) * </pre> * * <code>repeated .hbase.pb.Result results = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder getResultsOrBuilder( int index) { return results_.get(index); } public static final int STALE_FIELD_NUMBER = 6; private boolean stale_; /** * <code>optional bool stale = 6;</code> */ public boolean hasStale() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional bool stale = 6;</code> */ public boolean getStale() { return stale_; } public static final int PARTIAL_FLAG_PER_RESULT_FIELD_NUMBER = 7; private java.util.List<java.lang.Boolean> partialFlagPerResult_; /** * <pre> * This field is filled in if we are doing cellblocks. In the event that a row * could not fit all of its cells into a single RPC chunk, the results will be * returned as partials, and reconstructed into a complete result on the client * side. 
This field is a list of flags indicating whether or not the result * that the cells belong to is a partial result. For example, if this field * has false, false, true in it, then we know that on the client side, we need to * make another RPC request since the last result was only a partial. * </pre> * * <code>repeated bool partial_flag_per_result = 7;</code> */ public java.util.List<java.lang.Boolean> getPartialFlagPerResultList() { return partialFlagPerResult_; } /** * <pre> * This field is filled in if we are doing cellblocks. In the event that a row * could not fit all of its cells into a single RPC chunk, the results will be * returned as partials, and reconstructed into a complete result on the client * side. This field is a list of flags indicating whether or not the result * that the cells belong to is a partial result. For example, if this field * has false, false, true in it, then we know that on the client side, we need to * make another RPC request since the last result was only a partial. * </pre> * * <code>repeated bool partial_flag_per_result = 7;</code> */ public int getPartialFlagPerResultCount() { return partialFlagPerResult_.size(); } /** * <pre> * This field is filled in if we are doing cellblocks. In the event that a row * could not fit all of its cells into a single RPC chunk, the results will be * returned as partials, and reconstructed into a complete result on the client * side. This field is a list of flags indicating whether or not the result * that the cells belong to is a partial result. For example, if this field * has false, false, true in it, then we know that on the client side, we need to * make another RPC request since the last result was only a partial. 
* </pre> * * <code>repeated bool partial_flag_per_result = 7;</code> */ public boolean getPartialFlagPerResult(int index) { return partialFlagPerResult_.get(index); } public static final int MORE_RESULTS_IN_REGION_FIELD_NUMBER = 8; private boolean moreResultsInRegion_; /** * <pre> * A server may choose to limit the number of results returned to the client for * reasons such as the size in bytes or quantity of results accumulated. This field * will true when more results exist in the current region. * </pre> * * <code>optional bool more_results_in_region = 8;</code> */ public boolean hasMoreResultsInRegion() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <pre> * A server may choose to limit the number of results returned to the client for * reasons such as the size in bytes or quantity of results accumulated. This field * will true when more results exist in the current region. * </pre> * * <code>optional bool more_results_in_region = 8;</code> */ public boolean getMoreResultsInRegion() { return moreResultsInRegion_; } public static final int HEARTBEAT_MESSAGE_FIELD_NUMBER = 9; private boolean heartbeatMessage_; /** * <pre> * This field is filled in if the server is sending back a heartbeat message. * Heartbeat messages are sent back to the client to prevent the scanner from * timing out. Seeing a heartbeat message communicates to the Client that the * server would have continued to scan had the time limit not been reached. * </pre> * * <code>optional bool heartbeat_message = 9;</code> */ public boolean hasHeartbeatMessage() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <pre> * This field is filled in if the server is sending back a heartbeat message. * Heartbeat messages are sent back to the client to prevent the scanner from * timing out. Seeing a heartbeat message communicates to the Client that the * server would have continued to scan had the time limit not been reached. 
* </pre> * * <code>optional bool heartbeat_message = 9;</code> */ public boolean getHeartbeatMessage() { return heartbeatMessage_; } public static final int SCAN_METRICS_FIELD_NUMBER = 10; private org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics scanMetrics_; /** * <pre> * This field is filled in if the client has requested that scan metrics be tracked. * The metrics tracked here are sent back to the client to be tracked together with * the existing client side metrics. * </pre> * * <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code> */ public boolean hasScanMetrics() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** * <pre> * This field is filled in if the client has requested that scan metrics be tracked. * The metrics tracked here are sent back to the client to be tracked together with * the existing client side metrics. * </pre> * * <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics getScanMetrics() { return scanMetrics_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics.getDefaultInstance() : scanMetrics_; } /** * <pre> * This field is filled in if the client has requested that scan metrics be tracked. * The metrics tracked here are sent back to the client to be tracked together with * the existing client side metrics. * </pre> * * <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetricsOrBuilder getScanMetricsOrBuilder() { return scanMetrics_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics.getDefaultInstance() : scanMetrics_; } public static final int MVCC_READ_POINT_FIELD_NUMBER = 11; private long mvccReadPoint_; /** * <pre> * The mvcc read point which is used to open the scanner at server side. 
Client can * make use of this mvcc_read_point when restarting a scanner to get a consistent view * of a row. * </pre> * * <code>optional uint64 mvcc_read_point = 11 [default = 0];</code> */ public boolean hasMvccReadPoint() { return ((bitField0_ & 0x00000080) == 0x00000080); } /** * <pre> * The mvcc read point which is used to open the scanner at server side. Client can * make use of this mvcc_read_point when restarting a scanner to get a consistent view * of a row. * </pre> * * <code>optional uint64 mvcc_read_point = 11 [default = 0];</code> */ public long getMvccReadPoint() { return mvccReadPoint_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < cellsPerResult_.size(); i++) { output.writeUInt32(1, cellsPerResult_.get(i)); } if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt64(2, scannerId_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBool(3, moreResults_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeUInt32(4, ttl_); } for (int i = 0; i < results_.size(); i++) { output.writeMessage(5, results_.get(i)); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeBool(6, stale_); } for (int i = 0; i < partialFlagPerResult_.size(); i++) { output.writeBool(7, partialFlagPerResult_.get(i)); } if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeBool(8, moreResultsInRegion_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { output.writeBool(9, heartbeatMessage_); } if (((bitField0_ & 0x00000040) == 0x00000040)) { output.writeMessage(10, getScanMetrics()); } if (((bitField0_ & 0x00000080) == 0x00000080)) { output.writeUInt64(11, mvccReadPoint_); } 
unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; { int dataSize = 0; for (int i = 0; i < cellsPerResult_.size(); i++) { dataSize += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt32SizeNoTag(cellsPerResult_.get(i)); } size += dataSize; size += 1 * getCellsPerResultList().size(); } if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt64Size(2, scannerId_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(3, moreResults_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt32Size(4, ttl_); } for (int i = 0; i < results_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(5, results_.get(i)); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(6, stale_); } { int dataSize = 0; dataSize = 1 * getPartialFlagPerResultList().size(); size += dataSize; size += 1 * getPartialFlagPerResultList().size(); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(8, moreResultsInRegion_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(9, heartbeatMessage_); } if (((bitField0_ & 0x00000040) == 0x00000040)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(10, getScanMetrics()); } if (((bitField0_ & 0x00000080) == 0x00000080)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream 
.computeUInt64Size(11, mvccReadPoint_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse) obj; boolean result = true; result = result && getCellsPerResultList() .equals(other.getCellsPerResultList()); result = result && (hasScannerId() == other.hasScannerId()); if (hasScannerId()) { result = result && (getScannerId() == other.getScannerId()); } result = result && (hasMoreResults() == other.hasMoreResults()); if (hasMoreResults()) { result = result && (getMoreResults() == other.getMoreResults()); } result = result && (hasTtl() == other.hasTtl()); if (hasTtl()) { result = result && (getTtl() == other.getTtl()); } result = result && getResultsList() .equals(other.getResultsList()); result = result && (hasStale() == other.hasStale()); if (hasStale()) { result = result && (getStale() == other.getStale()); } result = result && getPartialFlagPerResultList() .equals(other.getPartialFlagPerResultList()); result = result && (hasMoreResultsInRegion() == other.hasMoreResultsInRegion()); if (hasMoreResultsInRegion()) { result = result && (getMoreResultsInRegion() == other.getMoreResultsInRegion()); } result = result && (hasHeartbeatMessage() == other.hasHeartbeatMessage()); if (hasHeartbeatMessage()) { result = result && (getHeartbeatMessage() == other.getHeartbeatMessage()); } result = result && (hasScanMetrics() == other.hasScanMetrics()); if (hasScanMetrics()) { result = result && getScanMetrics() .equals(other.getScanMetrics()); } result = result && (hasMvccReadPoint() == other.hasMvccReadPoint()); if 
(hasMvccReadPoint()) { result = result && (getMvccReadPoint() == other.getMvccReadPoint()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getCellsPerResultCount() > 0) { hash = (37 * hash) + CELLS_PER_RESULT_FIELD_NUMBER; hash = (53 * hash) + getCellsPerResultList().hashCode(); } if (hasScannerId()) { hash = (37 * hash) + SCANNER_ID_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( getScannerId()); } if (hasMoreResults()) { hash = (37 * hash) + MORE_RESULTS_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getMoreResults()); } if (hasTtl()) { hash = (37 * hash) + TTL_FIELD_NUMBER; hash = (53 * hash) + getTtl(); } if (getResultsCount() > 0) { hash = (37 * hash) + RESULTS_FIELD_NUMBER; hash = (53 * hash) + getResultsList().hashCode(); } if (hasStale()) { hash = (37 * hash) + STALE_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getStale()); } if (getPartialFlagPerResultCount() > 0) { hash = (37 * hash) + PARTIAL_FLAG_PER_RESULT_FIELD_NUMBER; hash = (53 * hash) + getPartialFlagPerResultList().hashCode(); } if (hasMoreResultsInRegion()) { hash = (37 * hash) + MORE_RESULTS_IN_REGION_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getMoreResultsInRegion()); } if (hasHeartbeatMessage()) { hash = (37 * hash) + HEARTBEAT_MESSAGE_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getHeartbeatMessage()); } if (hasScanMetrics()) { hash = (37 * hash) + SCAN_METRICS_FIELD_NUMBER; hash = (53 * hash) + getScanMetrics().hashCode(); } if (hasMvccReadPoint()) { hash = (37 * hash) + 
MVCC_READ_POINT_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( getMvccReadPoint()); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == 
DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> ** * The scan response. If there are no more results, more_results will * be false. If it is not specified, it means there are more. * </pre> * * Protobuf type {@code hbase.pb.ScanResponse} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.ScanResponse) org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponseOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_ScanResponse_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_ScanResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getResultsFieldBuilder(); 
getScanMetricsFieldBuilder(); } } public Builder clear() { super.clear(); cellsPerResult_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); scannerId_ = 0L; bitField0_ = (bitField0_ & ~0x00000002); moreResults_ = false; bitField0_ = (bitField0_ & ~0x00000004); ttl_ = 0; bitField0_ = (bitField0_ & ~0x00000008); if (resultsBuilder_ == null) { results_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000010); } else { resultsBuilder_.clear(); } stale_ = false; bitField0_ = (bitField0_ & ~0x00000020); partialFlagPerResult_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000040); moreResultsInRegion_ = false; bitField0_ = (bitField0_ & ~0x00000080); heartbeatMessage_ = false; bitField0_ = (bitField0_ & ~0x00000100); if (scanMetricsBuilder_ == null) { scanMetrics_ = null; } else { scanMetricsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000200); mvccReadPoint_ = 0L; bitField0_ = (bitField0_ & ~0x00000400); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_ScanResponse_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse result = new 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { cellsPerResult_ = java.util.Collections.unmodifiableList(cellsPerResult_); bitField0_ = (bitField0_ & ~0x00000001); } result.cellsPerResult_ = cellsPerResult_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000001; } result.scannerId_ = scannerId_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000002; } result.moreResults_ = moreResults_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000004; } result.ttl_ = ttl_; if (resultsBuilder_ == null) { if (((bitField0_ & 0x00000010) == 0x00000010)) { results_ = java.util.Collections.unmodifiableList(results_); bitField0_ = (bitField0_ & ~0x00000010); } result.results_ = results_; } else { result.results_ = resultsBuilder_.build(); } if (((from_bitField0_ & 0x00000020) == 0x00000020)) { to_bitField0_ |= 0x00000008; } result.stale_ = stale_; if (((bitField0_ & 0x00000040) == 0x00000040)) { partialFlagPerResult_ = java.util.Collections.unmodifiableList(partialFlagPerResult_); bitField0_ = (bitField0_ & ~0x00000040); } result.partialFlagPerResult_ = partialFlagPerResult_; if (((from_bitField0_ & 0x00000080) == 0x00000080)) { to_bitField0_ |= 0x00000010; } result.moreResultsInRegion_ = moreResultsInRegion_; if (((from_bitField0_ & 0x00000100) == 0x00000100)) { to_bitField0_ |= 0x00000020; } result.heartbeatMessage_ = heartbeatMessage_; if (((from_bitField0_ & 0x00000200) == 0x00000200)) { to_bitField0_ |= 0x00000040; } if (scanMetricsBuilder_ == null) { result.scanMetrics_ = scanMetrics_; } else { result.scanMetrics_ = scanMetricsBuilder_.build(); } if (((from_bitField0_ & 0x00000400) == 0x00000400)) { to_bitField0_ |= 0x00000080; } result.mvccReadPoint_ = mvccReadPoint_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { 
return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance()) return this; if (!other.cellsPerResult_.isEmpty()) { if (cellsPerResult_.isEmpty()) { cellsPerResult_ = other.cellsPerResult_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureCellsPerResultIsMutable(); cellsPerResult_.addAll(other.cellsPerResult_); } onChanged(); } if (other.hasScannerId()) { setScannerId(other.getScannerId()); } if (other.hasMoreResults()) { setMoreResults(other.getMoreResults()); } if (other.hasTtl()) { setTtl(other.getTtl()); } if (resultsBuilder_ == null) { if 
(!other.results_.isEmpty()) { if (results_.isEmpty()) { results_ = other.results_; bitField0_ = (bitField0_ & ~0x00000010); } else { ensureResultsIsMutable(); results_.addAll(other.results_); } onChanged(); } } else { if (!other.results_.isEmpty()) { if (resultsBuilder_.isEmpty()) { resultsBuilder_.dispose(); resultsBuilder_ = null; results_ = other.results_; bitField0_ = (bitField0_ & ~0x00000010); resultsBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getResultsFieldBuilder() : null; } else { resultsBuilder_.addAllMessages(other.results_); } } } if (other.hasStale()) { setStale(other.getStale()); } if (!other.partialFlagPerResult_.isEmpty()) { if (partialFlagPerResult_.isEmpty()) { partialFlagPerResult_ = other.partialFlagPerResult_; bitField0_ = (bitField0_ & ~0x00000040); } else { ensurePartialFlagPerResultIsMutable(); partialFlagPerResult_.addAll(other.partialFlagPerResult_); } onChanged(); } if (other.hasMoreResultsInRegion()) { setMoreResultsInRegion(other.getMoreResultsInRegion()); } if (other.hasHeartbeatMessage()) { setHeartbeatMessage(other.getHeartbeatMessage()); } if (other.hasScanMetrics()) { mergeScanMetrics(other.getScanMetrics()); } if (other.hasMvccReadPoint()) { setMvccReadPoint(other.getMvccReadPoint()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = 
(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List<java.lang.Integer> cellsPerResult_ = java.util.Collections.emptyList(); private void ensureCellsPerResultIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { cellsPerResult_ = new java.util.ArrayList<java.lang.Integer>(cellsPerResult_); bitField0_ |= 0x00000001; } } /** * <pre> * This field is filled in if we are doing cellblocks. A cellblock is made up * of all Cells serialized out as one cellblock BUT responses from a server * have their Cells grouped by Result. So we can reconstitute the * Results on the client-side, this field is a list of counts of Cells * in each Result that makes up the response. For example, if this field * has 3, 3, 3 in it, then we know that on the client, we are to make * three Results each of three Cells each. * </pre> * * <code>repeated uint32 cells_per_result = 1;</code> */ public java.util.List<java.lang.Integer> getCellsPerResultList() { return java.util.Collections.unmodifiableList(cellsPerResult_); } /** * <pre> * This field is filled in if we are doing cellblocks. A cellblock is made up * of all Cells serialized out as one cellblock BUT responses from a server * have their Cells grouped by Result. So we can reconstitute the * Results on the client-side, this field is a list of counts of Cells * in each Result that makes up the response. For example, if this field * has 3, 3, 3 in it, then we know that on the client, we are to make * three Results each of three Cells each. * </pre> * * <code>repeated uint32 cells_per_result = 1;</code> */ public int getCellsPerResultCount() { return cellsPerResult_.size(); } /** * <pre> * This field is filled in if we are doing cellblocks. 
A cellblock is made up * of all Cells serialized out as one cellblock BUT responses from a server * have their Cells grouped by Result. So we can reconstitute the * Results on the client-side, this field is a list of counts of Cells * in each Result that makes up the response. For example, if this field * has 3, 3, 3 in it, then we know that on the client, we are to make * three Results each of three Cells each. * </pre> * * <code>repeated uint32 cells_per_result = 1;</code> */ public int getCellsPerResult(int index) { return cellsPerResult_.get(index); } /** * <pre> * This field is filled in if we are doing cellblocks. A cellblock is made up * of all Cells serialized out as one cellblock BUT responses from a server * have their Cells grouped by Result. So we can reconstitute the * Results on the client-side, this field is a list of counts of Cells * in each Result that makes up the response. For example, if this field * has 3, 3, 3 in it, then we know that on the client, we are to make * three Results each of three Cells each. * </pre> * * <code>repeated uint32 cells_per_result = 1;</code> */ public Builder setCellsPerResult( int index, int value) { ensureCellsPerResultIsMutable(); cellsPerResult_.set(index, value); onChanged(); return this; } /** * <pre> * This field is filled in if we are doing cellblocks. A cellblock is made up * of all Cells serialized out as one cellblock BUT responses from a server * have their Cells grouped by Result. So we can reconstitute the * Results on the client-side, this field is a list of counts of Cells * in each Result that makes up the response. For example, if this field * has 3, 3, 3 in it, then we know that on the client, we are to make * three Results each of three Cells each. 
* </pre> * * <code>repeated uint32 cells_per_result = 1;</code> */ public Builder addCellsPerResult(int value) { ensureCellsPerResultIsMutable(); cellsPerResult_.add(value); onChanged(); return this; } /** * <pre> * This field is filled in if we are doing cellblocks. A cellblock is made up * of all Cells serialized out as one cellblock BUT responses from a server * have their Cells grouped by Result. So we can reconstitute the * Results on the client-side, this field is a list of counts of Cells * in each Result that makes up the response. For example, if this field * has 3, 3, 3 in it, then we know that on the client, we are to make * three Results each of three Cells each. * </pre> * * <code>repeated uint32 cells_per_result = 1;</code> */ public Builder addAllCellsPerResult( java.lang.Iterable<? extends java.lang.Integer> values) { ensureCellsPerResultIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, cellsPerResult_); onChanged(); return this; } /** * <pre> * This field is filled in if we are doing cellblocks. A cellblock is made up * of all Cells serialized out as one cellblock BUT responses from a server * have their Cells grouped by Result. So we can reconstitute the * Results on the client-side, this field is a list of counts of Cells * in each Result that makes up the response. For example, if this field * has 3, 3, 3 in it, then we know that on the client, we are to make * three Results each of three Cells each. 
* </pre> * * <code>repeated uint32 cells_per_result = 1;</code> */ public Builder clearCellsPerResult() { cellsPerResult_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } private long scannerId_ ; /** * <code>optional uint64 scanner_id = 2;</code> */ public boolean hasScannerId() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional uint64 scanner_id = 2;</code> */ public long getScannerId() { return scannerId_; } /** * <code>optional uint64 scanner_id = 2;</code> */ public Builder setScannerId(long value) { bitField0_ |= 0x00000002; scannerId_ = value; onChanged(); return this; } /** * <code>optional uint64 scanner_id = 2;</code> */ public Builder clearScannerId() { bitField0_ = (bitField0_ & ~0x00000002); scannerId_ = 0L; onChanged(); return this; } private boolean moreResults_ ; /** * <code>optional bool more_results = 3;</code> */ public boolean hasMoreResults() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional bool more_results = 3;</code> */ public boolean getMoreResults() { return moreResults_; } /** * <code>optional bool more_results = 3;</code> */ public Builder setMoreResults(boolean value) { bitField0_ |= 0x00000004; moreResults_ = value; onChanged(); return this; } /** * <code>optional bool more_results = 3;</code> */ public Builder clearMoreResults() { bitField0_ = (bitField0_ & ~0x00000004); moreResults_ = false; onChanged(); return this; } private int ttl_ ; /** * <code>optional uint32 ttl = 4;</code> */ public boolean hasTtl() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional uint32 ttl = 4;</code> */ public int getTtl() { return ttl_; } /** * <code>optional uint32 ttl = 4;</code> */ public Builder setTtl(int value) { bitField0_ |= 0x00000008; ttl_ = value; onChanged(); return this; } /** * <code>optional uint32 ttl = 4;</code> */ public Builder clearTtl() { bitField0_ = (bitField0_ & ~0x00000008); ttl_ = 0; onChanged(); 
return this; } private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result> results_ = java.util.Collections.emptyList(); private void ensureResultsIsMutable() { if (!((bitField0_ & 0x00000010) == 0x00000010)) { results_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result>(results_); bitField0_ |= 0x00000010; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder> resultsBuilder_; /** * <pre> * If cells are not carried in an accompanying cellblock, then they are pb'd here. * This field is mutually exclusive with cells_per_result (since the Cells will * be inside the pb'd Result) * </pre> * * <code>repeated .hbase.pb.Result results = 5;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result> getResultsList() { if (resultsBuilder_ == null) { return java.util.Collections.unmodifiableList(results_); } else { return resultsBuilder_.getMessageList(); } } /** * <pre> * If cells are not carried in an accompanying cellblock, then they are pb'd here. * This field is mutually exclusive with cells_per_result (since the Cells will * be inside the pb'd Result) * </pre> * * <code>repeated .hbase.pb.Result results = 5;</code> */ public int getResultsCount() { if (resultsBuilder_ == null) { return results_.size(); } else { return resultsBuilder_.getCount(); } } /** * <pre> * If cells are not carried in an accompanying cellblock, then they are pb'd here. 
* This field is mutually exclusive with cells_per_result (since the Cells will * be inside the pb'd Result) * </pre> * * <code>repeated .hbase.pb.Result results = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result getResults(int index) { if (resultsBuilder_ == null) { return results_.get(index); } else { return resultsBuilder_.getMessage(index); } } /** * <pre> * If cells are not carried in an accompanying cellblock, then they are pb'd here. * This field is mutually exclusive with cells_per_result (since the Cells will * be inside the pb'd Result) * </pre> * * <code>repeated .hbase.pb.Result results = 5;</code> */ public Builder setResults( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result value) { if (resultsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureResultsIsMutable(); results_.set(index, value); onChanged(); } else { resultsBuilder_.setMessage(index, value); } return this; } /** * <pre> * If cells are not carried in an accompanying cellblock, then they are pb'd here. * This field is mutually exclusive with cells_per_result (since the Cells will * be inside the pb'd Result) * </pre> * * <code>repeated .hbase.pb.Result results = 5;</code> */ public Builder setResults( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder builderForValue) { if (resultsBuilder_ == null) { ensureResultsIsMutable(); results_.set(index, builderForValue.build()); onChanged(); } else { resultsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <pre> * If cells are not carried in an accompanying cellblock, then they are pb'd here. 
* This field is mutually exclusive with cells_per_result (since the Cells will * be inside the pb'd Result) * </pre> * * <code>repeated .hbase.pb.Result results = 5;</code> */ public Builder addResults(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result value) { if (resultsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureResultsIsMutable(); results_.add(value); onChanged(); } else { resultsBuilder_.addMessage(value); } return this; } /** * <pre> * If cells are not carried in an accompanying cellblock, then they are pb'd here. * This field is mutually exclusive with cells_per_result (since the Cells will * be inside the pb'd Result) * </pre> * * <code>repeated .hbase.pb.Result results = 5;</code> */ public Builder addResults( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result value) { if (resultsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureResultsIsMutable(); results_.add(index, value); onChanged(); } else { resultsBuilder_.addMessage(index, value); } return this; } /** * <pre> * If cells are not carried in an accompanying cellblock, then they are pb'd here. * This field is mutually exclusive with cells_per_result (since the Cells will * be inside the pb'd Result) * </pre> * * <code>repeated .hbase.pb.Result results = 5;</code> */ public Builder addResults( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder builderForValue) { if (resultsBuilder_ == null) { ensureResultsIsMutable(); results_.add(builderForValue.build()); onChanged(); } else { resultsBuilder_.addMessage(builderForValue.build()); } return this; } /** * <pre> * If cells are not carried in an accompanying cellblock, then they are pb'd here. 
* This field is mutually exclusive with cells_per_result (since the Cells will * be inside the pb'd Result) * </pre> * * <code>repeated .hbase.pb.Result results = 5;</code> */ public Builder addResults( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder builderForValue) { if (resultsBuilder_ == null) { ensureResultsIsMutable(); results_.add(index, builderForValue.build()); onChanged(); } else { resultsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <pre> * If cells are not carried in an accompanying cellblock, then they are pb'd here. * This field is mutually exclusive with cells_per_result (since the Cells will * be inside the pb'd Result) * </pre> * * <code>repeated .hbase.pb.Result results = 5;</code> */ public Builder addAllResults( java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result> values) { if (resultsBuilder_ == null) { ensureResultsIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, results_); onChanged(); } else { resultsBuilder_.addAllMessages(values); } return this; } /** * <pre> * If cells are not carried in an accompanying cellblock, then they are pb'd here. * This field is mutually exclusive with cells_per_result (since the Cells will * be inside the pb'd Result) * </pre> * * <code>repeated .hbase.pb.Result results = 5;</code> */ public Builder clearResults() { if (resultsBuilder_ == null) { results_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000010); onChanged(); } else { resultsBuilder_.clear(); } return this; } /** * <pre> * If cells are not carried in an accompanying cellblock, then they are pb'd here. 
* This field is mutually exclusive with cells_per_result (since the Cells will * be inside the pb'd Result) * </pre> * * <code>repeated .hbase.pb.Result results = 5;</code> */ public Builder removeResults(int index) { if (resultsBuilder_ == null) { ensureResultsIsMutable(); results_.remove(index); onChanged(); } else { resultsBuilder_.remove(index); } return this; } /** * <pre> * If cells are not carried in an accompanying cellblock, then they are pb'd here. * This field is mutually exclusive with cells_per_result (since the Cells will * be inside the pb'd Result) * </pre> * * <code>repeated .hbase.pb.Result results = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder getResultsBuilder( int index) { return getResultsFieldBuilder().getBuilder(index); } /** * <pre> * If cells are not carried in an accompanying cellblock, then they are pb'd here. * This field is mutually exclusive with cells_per_result (since the Cells will * be inside the pb'd Result) * </pre> * * <code>repeated .hbase.pb.Result results = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder getResultsOrBuilder( int index) { if (resultsBuilder_ == null) { return results_.get(index); } else { return resultsBuilder_.getMessageOrBuilder(index); } } /** * <pre> * If cells are not carried in an accompanying cellblock, then they are pb'd here. * This field is mutually exclusive with cells_per_result (since the Cells will * be inside the pb'd Result) * </pre> * * <code>repeated .hbase.pb.Result results = 5;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder> getResultsOrBuilderList() { if (resultsBuilder_ != null) { return resultsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(results_); } } /** * <pre> * If cells are not carried in an accompanying cellblock, then they are pb'd here. 
* This field is mutually exclusive with cells_per_result (since the Cells will * be inside the pb'd Result) * </pre> * * <code>repeated .hbase.pb.Result results = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder addResultsBuilder() { return getResultsFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance()); } /** * <pre> * If cells are not carried in an accompanying cellblock, then they are pb'd here. * This field is mutually exclusive with cells_per_result (since the Cells will * be inside the pb'd Result) * </pre> * * <code>repeated .hbase.pb.Result results = 5;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder addResultsBuilder( int index) { return getResultsFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance()); } /** * <pre> * If cells are not carried in an accompanying cellblock, then they are pb'd here. 
* This field is mutually exclusive with cells_per_result (since the Cells will * be inside the pb'd Result) * </pre> * * <code>repeated .hbase.pb.Result results = 5;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder> getResultsBuilderList() { return getResultsFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder> getResultsFieldBuilder() { if (resultsBuilder_ == null) { resultsBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder>( results_, ((bitField0_ & 0x00000010) == 0x00000010), getParentForChildren(), isClean()); results_ = null; } return resultsBuilder_; } private boolean stale_ ; /** * <code>optional bool stale = 6;</code> */ public boolean hasStale() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <code>optional bool stale = 6;</code> */ public boolean getStale() { return stale_; } /** * <code>optional bool stale = 6;</code> */ public Builder setStale(boolean value) { bitField0_ |= 0x00000020; stale_ = value; onChanged(); return this; } /** * <code>optional bool stale = 6;</code> */ public Builder clearStale() { bitField0_ = (bitField0_ & ~0x00000020); stale_ = false; onChanged(); return this; } private java.util.List<java.lang.Boolean> partialFlagPerResult_ = java.util.Collections.emptyList(); private void ensurePartialFlagPerResultIsMutable() { if (!((bitField0_ & 0x00000040) == 0x00000040)) { partialFlagPerResult_ = new 
java.util.ArrayList<java.lang.Boolean>(partialFlagPerResult_); bitField0_ |= 0x00000040; } } /** * <pre> * This field is filled in if we are doing cellblocks. In the event that a row * could not fit all of its cells into a single RPC chunk, the results will be * returned as partials, and reconstructed into a complete result on the client * side. This field is a list of flags indicating whether or not the result * that the cells belong to is a partial result. For example, if this field * has false, false, true in it, then we know that on the client side, we need to * make another RPC request since the last result was only a partial. * </pre> * * <code>repeated bool partial_flag_per_result = 7;</code> */ public java.util.List<java.lang.Boolean> getPartialFlagPerResultList() { return java.util.Collections.unmodifiableList(partialFlagPerResult_); } /** * <pre> * This field is filled in if we are doing cellblocks. In the event that a row * could not fit all of its cells into a single RPC chunk, the results will be * returned as partials, and reconstructed into a complete result on the client * side. This field is a list of flags indicating whether or not the result * that the cells belong to is a partial result. For example, if this field * has false, false, true in it, then we know that on the client side, we need to * make another RPC request since the last result was only a partial. * </pre> * * <code>repeated bool partial_flag_per_result = 7;</code> */ public int getPartialFlagPerResultCount() { return partialFlagPerResult_.size(); } /** * <pre> * This field is filled in if we are doing cellblocks. In the event that a row * could not fit all of its cells into a single RPC chunk, the results will be * returned as partials, and reconstructed into a complete result on the client * side. This field is a list of flags indicating whether or not the result * that the cells belong to is a partial result. 
For example, if this field * has false, false, true in it, then we know that on the client side, we need to * make another RPC request since the last result was only a partial. * </pre> * * <code>repeated bool partial_flag_per_result = 7;</code> */ public boolean getPartialFlagPerResult(int index) { return partialFlagPerResult_.get(index); } /** * <pre> * This field is filled in if we are doing cellblocks. In the event that a row * could not fit all of its cells into a single RPC chunk, the results will be * returned as partials, and reconstructed into a complete result on the client * side. This field is a list of flags indicating whether or not the result * that the cells belong to is a partial result. For example, if this field * has false, false, true in it, then we know that on the client side, we need to * make another RPC request since the last result was only a partial. * </pre> * * <code>repeated bool partial_flag_per_result = 7;</code> */ public Builder setPartialFlagPerResult( int index, boolean value) { ensurePartialFlagPerResultIsMutable(); partialFlagPerResult_.set(index, value); onChanged(); return this; } /** * <pre> * This field is filled in if we are doing cellblocks. In the event that a row * could not fit all of its cells into a single RPC chunk, the results will be * returned as partials, and reconstructed into a complete result on the client * side. This field is a list of flags indicating whether or not the result * that the cells belong to is a partial result. For example, if this field * has false, false, true in it, then we know that on the client side, we need to * make another RPC request since the last result was only a partial. * </pre> * * <code>repeated bool partial_flag_per_result = 7;</code> */ public Builder addPartialFlagPerResult(boolean value) { ensurePartialFlagPerResultIsMutable(); partialFlagPerResult_.add(value); onChanged(); return this; } /** * <pre> * This field is filled in if we are doing cellblocks. 
In the event that a row * could not fit all of its cells into a single RPC chunk, the results will be * returned as partials, and reconstructed into a complete result on the client * side. This field is a list of flags indicating whether or not the result * that the cells belong to is a partial result. For example, if this field * has false, false, true in it, then we know that on the client side, we need to * make another RPC request since the last result was only a partial. * </pre> * * <code>repeated bool partial_flag_per_result = 7;</code> */ public Builder addAllPartialFlagPerResult( java.lang.Iterable<? extends java.lang.Boolean> values) { ensurePartialFlagPerResultIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, partialFlagPerResult_); onChanged(); return this; } /** * <pre> * This field is filled in if we are doing cellblocks. In the event that a row * could not fit all of its cells into a single RPC chunk, the results will be * returned as partials, and reconstructed into a complete result on the client * side. This field is a list of flags indicating whether or not the result * that the cells belong to is a partial result. For example, if this field * has false, false, true in it, then we know that on the client side, we need to * make another RPC request since the last result was only a partial. * </pre> * * <code>repeated bool partial_flag_per_result = 7;</code> */ public Builder clearPartialFlagPerResult() { partialFlagPerResult_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000040); onChanged(); return this; } private boolean moreResultsInRegion_ ; /** * <pre> * A server may choose to limit the number of results returned to the client for * reasons such as the size in bytes or quantity of results accumulated. This field * will true when more results exist in the current region. 
* </pre> * * <code>optional bool more_results_in_region = 8;</code> */ public boolean hasMoreResultsInRegion() { return ((bitField0_ & 0x00000080) == 0x00000080); } /** * <pre> * A server may choose to limit the number of results returned to the client for * reasons such as the size in bytes or quantity of results accumulated. This field * will true when more results exist in the current region. * </pre> * * <code>optional bool more_results_in_region = 8;</code> */ public boolean getMoreResultsInRegion() { return moreResultsInRegion_; } /** * <pre> * A server may choose to limit the number of results returned to the client for * reasons such as the size in bytes or quantity of results accumulated. This field * will true when more results exist in the current region. * </pre> * * <code>optional bool more_results_in_region = 8;</code> */ public Builder setMoreResultsInRegion(boolean value) { bitField0_ |= 0x00000080; moreResultsInRegion_ = value; onChanged(); return this; } /** * <pre> * A server may choose to limit the number of results returned to the client for * reasons such as the size in bytes or quantity of results accumulated. This field * will true when more results exist in the current region. * </pre> * * <code>optional bool more_results_in_region = 8;</code> */ public Builder clearMoreResultsInRegion() { bitField0_ = (bitField0_ & ~0x00000080); moreResultsInRegion_ = false; onChanged(); return this; } private boolean heartbeatMessage_ ; /** * <pre> * This field is filled in if the server is sending back a heartbeat message. * Heartbeat messages are sent back to the client to prevent the scanner from * timing out. Seeing a heartbeat message communicates to the Client that the * server would have continued to scan had the time limit not been reached. 
* </pre> * * <code>optional bool heartbeat_message = 9;</code> */ public boolean hasHeartbeatMessage() { return ((bitField0_ & 0x00000100) == 0x00000100); } /** * <pre> * This field is filled in if the server is sending back a heartbeat message. * Heartbeat messages are sent back to the client to prevent the scanner from * timing out. Seeing a heartbeat message communicates to the Client that the * server would have continued to scan had the time limit not been reached. * </pre> * * <code>optional bool heartbeat_message = 9;</code> */ public boolean getHeartbeatMessage() { return heartbeatMessage_; } /** * <pre> * This field is filled in if the server is sending back a heartbeat message. * Heartbeat messages are sent back to the client to prevent the scanner from * timing out. Seeing a heartbeat message communicates to the Client that the * server would have continued to scan had the time limit not been reached. * </pre> * * <code>optional bool heartbeat_message = 9;</code> */ public Builder setHeartbeatMessage(boolean value) { bitField0_ |= 0x00000100; heartbeatMessage_ = value; onChanged(); return this; } /** * <pre> * This field is filled in if the server is sending back a heartbeat message. * Heartbeat messages are sent back to the client to prevent the scanner from * timing out. Seeing a heartbeat message communicates to the Client that the * server would have continued to scan had the time limit not been reached. 
* </pre> * * <code>optional bool heartbeat_message = 9;</code> */ public Builder clearHeartbeatMessage() { bitField0_ = (bitField0_ & ~0x00000100); heartbeatMessage_ = false; onChanged(); return this; } private org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics scanMetrics_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics, org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetricsOrBuilder> scanMetricsBuilder_; /** * <pre> * This field is filled in if the client has requested that scan metrics be tracked. * The metrics tracked here are sent back to the client to be tracked together with * the existing client side metrics. * </pre> * * <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code> */ public boolean hasScanMetrics() { return ((bitField0_ & 0x00000200) == 0x00000200); } /** * <pre> * This field is filled in if the client has requested that scan metrics be tracked. * The metrics tracked here are sent back to the client to be tracked together with * the existing client side metrics. * </pre> * * <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics getScanMetrics() { if (scanMetricsBuilder_ == null) { return scanMetrics_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics.getDefaultInstance() : scanMetrics_; } else { return scanMetricsBuilder_.getMessage(); } } /** * <pre> * This field is filled in if the client has requested that scan metrics be tracked. * The metrics tracked here are sent back to the client to be tracked together with * the existing client side metrics. 
* </pre> * * <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code> */ public Builder setScanMetrics(org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics value) { if (scanMetricsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } scanMetrics_ = value; onChanged(); } else { scanMetricsBuilder_.setMessage(value); } bitField0_ |= 0x00000200; return this; } /** * <pre> * This field is filled in if the client has requested that scan metrics be tracked. * The metrics tracked here are sent back to the client to be tracked together with * the existing client side metrics. * </pre> * * <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code> */ public Builder setScanMetrics( org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics.Builder builderForValue) { if (scanMetricsBuilder_ == null) { scanMetrics_ = builderForValue.build(); onChanged(); } else { scanMetricsBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000200; return this; } /** * <pre> * This field is filled in if the client has requested that scan metrics be tracked. * The metrics tracked here are sent back to the client to be tracked together with * the existing client side metrics. 
* </pre> * * <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code> */ public Builder mergeScanMetrics(org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics value) { if (scanMetricsBuilder_ == null) { if (((bitField0_ & 0x00000200) == 0x00000200) && scanMetrics_ != null && scanMetrics_ != org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics.getDefaultInstance()) { scanMetrics_ = org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics.newBuilder(scanMetrics_).mergeFrom(value).buildPartial(); } else { scanMetrics_ = value; } onChanged(); } else { scanMetricsBuilder_.mergeFrom(value); } bitField0_ |= 0x00000200; return this; } /** * <pre> * This field is filled in if the client has requested that scan metrics be tracked. * The metrics tracked here are sent back to the client to be tracked together with * the existing client side metrics. * </pre> * * <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code> */ public Builder clearScanMetrics() { if (scanMetricsBuilder_ == null) { scanMetrics_ = null; onChanged(); } else { scanMetricsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000200); return this; } /** * <pre> * This field is filled in if the client has requested that scan metrics be tracked. * The metrics tracked here are sent back to the client to be tracked together with * the existing client side metrics. * </pre> * * <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics.Builder getScanMetricsBuilder() { bitField0_ |= 0x00000200; onChanged(); return getScanMetricsFieldBuilder().getBuilder(); } /** * <pre> * This field is filled in if the client has requested that scan metrics be tracked. * The metrics tracked here are sent back to the client to be tracked together with * the existing client side metrics. 
* </pre> * * <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetricsOrBuilder getScanMetricsOrBuilder() { if (scanMetricsBuilder_ != null) { return scanMetricsBuilder_.getMessageOrBuilder(); } else { return scanMetrics_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics.getDefaultInstance() : scanMetrics_; } } /** * <pre> * This field is filled in if the client has requested that scan metrics be tracked. * The metrics tracked here are sent back to the client to be tracked together with * the existing client side metrics. * </pre> * * <code>optional .hbase.pb.ScanMetrics scan_metrics = 10;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics, org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetricsOrBuilder> getScanMetricsFieldBuilder() { if (scanMetricsBuilder_ == null) { scanMetricsBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics, org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetricsOrBuilder>( getScanMetrics(), getParentForChildren(), isClean()); scanMetrics_ = null; } return scanMetricsBuilder_; } private long mvccReadPoint_ ; /** * <pre> * The mvcc read point which is used to open the scanner at server side. Client can * make use of this mvcc_read_point when restarting a scanner to get a consistent view * of a row. 
* </pre> * * <code>optional uint64 mvcc_read_point = 11 [default = 0];</code> */ public boolean hasMvccReadPoint() { return ((bitField0_ & 0x00000400) == 0x00000400); } /** * <pre> * The mvcc read point which is used to open the scanner at server side. Client can * make use of this mvcc_read_point when restarting a scanner to get a consistent view * of a row. * </pre> * * <code>optional uint64 mvcc_read_point = 11 [default = 0];</code> */ public long getMvccReadPoint() { return mvccReadPoint_; } /** * <pre> * The mvcc read point which is used to open the scanner at server side. Client can * make use of this mvcc_read_point when restarting a scanner to get a consistent view * of a row. * </pre> * * <code>optional uint64 mvcc_read_point = 11 [default = 0];</code> */ public Builder setMvccReadPoint(long value) { bitField0_ |= 0x00000400; mvccReadPoint_ = value; onChanged(); return this; } /** * <pre> * The mvcc read point which is used to open the scanner at server side. Client can * make use of this mvcc_read_point when restarting a scanner to get a consistent view * of a row. 
* </pre> * * <code>optional uint64 mvcc_read_point = 11 [default = 0];</code> */ public Builder clearMvccReadPoint() { bitField0_ = (bitField0_ & ~0x00000400); mvccReadPoint_ = 0L; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.ScanResponse) } // @@protoc_insertion_point(class_scope:hbase.pb.ScanResponse) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ScanResponse> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<ScanResponse>() { public ScanResponse parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new ScanResponse(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ScanResponse> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ScanResponse> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse getDefaultInstanceForType() { return 
DEFAULT_INSTANCE; } } public interface BulkLoadHFileRequestOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.BulkLoadHFileRequest) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ boolean hasRegion(); /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); /** * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> getFamilyPathList(); /** * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index); /** * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code> */ int getFamilyPathCount(); /** * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code> */ java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> getFamilyPathOrBuilderList(); /** * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder( int index); /** * <code>optional bool assign_seq_num = 3;</code> */ boolean hasAssignSeqNum(); /** * <code>optional bool assign_seq_num = 3;</code> */ boolean getAssignSeqNum(); /** * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code> */ boolean hasFsToken(); /** * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken getFsToken(); /** * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationTokenOrBuilder getFsTokenOrBuilder(); /** * <code>optional string bulk_token = 5;</code> */ boolean hasBulkToken(); /** * <code>optional string bulk_token = 5;</code> */ java.lang.String getBulkToken(); /** * <code>optional string bulk_token = 5;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getBulkTokenBytes(); /** * <code>optional bool copy_file = 6 [default = false];</code> */ boolean hasCopyFile(); /** * <code>optional bool copy_file = 6 [default = false];</code> */ boolean getCopyFile(); } /** * <pre> ** * Atomically bulk load multiple HFiles (say from different column families) * into an open region. * </pre> * * Protobuf type {@code hbase.pb.BulkLoadHFileRequest} */ public static final class BulkLoadHFileRequest extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.BulkLoadHFileRequest) BulkLoadHFileRequestOrBuilder { // Use BulkLoadHFileRequest.newBuilder() to construct. 
private BulkLoadHFileRequest(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private BulkLoadHFileRequest() { familyPath_ = java.util.Collections.emptyList(); assignSeqNum_ = false; bulkToken_ = ""; copyFile_ = false; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private BulkLoadHFileRequest( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = region_.toBuilder(); } region_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(region_); region_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 18: { if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { familyPath_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath>(); mutable_bitField0_ |= 0x00000002; } familyPath_.add( input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.PARSER, 
extensionRegistry)); break; } case 24: { bitField0_ |= 0x00000002; assignSeqNum_ = input.readBool(); break; } case 34: { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken.Builder subBuilder = null; if (((bitField0_ & 0x00000004) == 0x00000004)) { subBuilder = fsToken_.toBuilder(); } fsToken_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(fsToken_); fsToken_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000004; break; } case 42: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000008; bulkToken_ = bs; break; } case 48: { bitField0_ |= 0x00000010; copyFile_ = input.readBool(); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { familyPath_ = java.util.Collections.unmodifiableList(familyPath_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileRequest_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.class, 
// NOTE(review): protoc-generated code ("DO NOT EDIT" per file header) — kept byte-identical; comments only.
// FamilyPathOrBuilder: read-only view of the nested hbase.pb.BulkLoadHFileRequest.FamilyPath
// message, which maps a column family (required bytes family = 1) to an HFile location
// (required string path = 2).
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.Builder.class); } public interface FamilyPathOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.BulkLoadHFileRequest.FamilyPath) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>required bytes family = 1;</code> */ boolean hasFamily(); /** * <code>required bytes family = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getFamily(); /** * <code>required string path = 2;</code> */ boolean hasPath(); /** * <code>required string path = 2;</code> */ java.lang.String getPath(); /** * <code>required string path = 2;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getPathBytes(); } /** * Protobuf type {@code hbase.pb.BulkLoadHFileRequest.FamilyPath} */ public static final class FamilyPath extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.BulkLoadHFileRequest.FamilyPath) FamilyPathOrBuilder { // Use FamilyPath.newBuilder() to construct.
// Immutable message implementation: constructors (including the stream-parsing one, whose
// switch handles tag 10 = family bytes and tag 18 = path string), accessors with the lazy
// String/ByteString conversion for path_, wire (de)serialization, equals/hashCode, the
// static parseFrom family, the generated Builder, and the singleton DEFAULT_INSTANCE/PARSER.
private FamilyPath(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private FamilyPath() { family_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; path_ = ""; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private FamilyPath( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; family_ = input.readBytes(); break; } case 18: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; path_ = bs; break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_descriptor; } protected
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder.class); } private int bitField0_; public static final int FAMILY_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString family_; /** * <code>required bytes family = 1;</code> */ public boolean hasFamily() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes family = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getFamily() { return family_; } public static final int PATH_FIELD_NUMBER = 2; private volatile java.lang.Object path_; /** * <code>required string path = 2;</code> */ public boolean hasPath() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required string path = 2;</code> */ public java.lang.String getPath() { java.lang.Object ref = path_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { path_ = s; } return s; } } /** * <code>required string path = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getPathBytes() { java.lang.Object ref = path_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); path_ = b; return b; } else { return
(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasFamily()) { memoizedIsInitialized = 0; return false; } if (!hasPath()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, family_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 2, path_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(1, family_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(2, path_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath) obj; boolean result = true; result = result && (hasFamily() == other.hasFamily()); if (hasFamily()) { result = result && getFamily() .equals(other.getFamily()); } result =
result && (hasPath() == other.hasPath()); if (hasPath()) { result = result && getPath() .equals(other.getPath()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasFamily()) { hash = (37 * hash) + FAMILY_FIELD_NUMBER; hash = (53 * hash) + getFamily().hashCode(); } if (hasPath()) { hash = (37 * hash) + PATH_FIELD_NUMBER; hash = (53 * hash) + getPath().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data,
extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.BulkLoadHFileRequest.FamilyPath} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.BulkLoadHFileRequest.FamilyPath) org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.class,
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); family_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); path_ = ""; bitField0_ = (bitField0_ & ~0x00000002); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath result = new
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.family_ = family_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.path_ = path_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath other) { if (other ==
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance()) return this; if (other.hasFamily()) { setFamily(other.getFamily()); } if (other.hasPath()) { bitField0_ |= 0x00000002; path_ = other.path_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasFamily()) { return false; } if (!hasPath()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString family_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes family = 1;</code> */ public boolean hasFamily() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes family = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getFamily() { return family_; } /** * <code>required bytes family = 1;</code> */ public Builder setFamily(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; family_ = value; onChanged(); return this; } /** * <code>required bytes family =
1;</code> */ public Builder clearFamily() { bitField0_ = (bitField0_ & ~0x00000001); family_ = getDefaultInstance().getFamily(); onChanged(); return this; } private java.lang.Object path_ = ""; /** * <code>required string path = 2;</code> */ public boolean hasPath() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required string path = 2;</code> */ public java.lang.String getPath() { java.lang.Object ref = path_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { path_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>required string path = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getPathBytes() { java.lang.Object ref = path_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); path_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <code>required string path = 2;</code> */ public Builder setPath( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; path_ = value; onChanged(); return this; } /** * <code>required string path = 2;</code> */ public Builder clearPath() { bitField0_ = (bitField0_ & ~0x00000002); path_ = getDefaultInstance().getPath(); onChanged(); return this; } /** * <code>required string path = 2;</code> */ public Builder setPathBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; path_ = value; onChanged(); return this; } public final Builder setUnknownFields( final
org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.BulkLoadHFileRequest.FamilyPath) } // @@protoc_insertion_point(class_scope:hbase.pb.BulkLoadHFileRequest.FamilyPath) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FamilyPath> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<FamilyPath>() { public FamilyPath parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new FamilyPath(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FamilyPath> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FamilyPath> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } private int bitField0_; public static final int REGION_FIELD_NUMBER = 1; private
// NOTE(review): protoc-generated code ("DO NOT EDIT" per file header) — kept byte-identical; comments only.
// Outer BulkLoadHFileRequest members: accessors for region (required message, field 1),
// family_path (repeated FamilyPath, field 2), assign_seq_num (optional bool, field 3),
// fs_token (optional DelegationToken message, field 4), bulk_token (optional string, field 5,
// stored lazily as String-or-ByteString), copy_file (optional bool, field 6, default false);
// then isInitialized/writeTo/getSerializedSize, equals/hashCode, the static parse* overloads,
// and the builder factory methods. Ends inside the javadoc for the generated Builder.
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_; /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } public static final int FAMILY_PATH_FIELD_NUMBER = 2; private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> familyPath_; /** * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> getFamilyPathList() { return familyPath_; } /** * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code> */ public java.util.List<?
extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> getFamilyPathOrBuilderList() { return familyPath_; } /** * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code> */ public int getFamilyPathCount() { return familyPath_.size(); } /** * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index) { return familyPath_.get(index); } /** * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder( int index) { return familyPath_.get(index); } public static final int ASSIGN_SEQ_NUM_FIELD_NUMBER = 3; private boolean assignSeqNum_; /** * <code>optional bool assign_seq_num = 3;</code> */ public boolean hasAssignSeqNum() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional bool assign_seq_num = 3;</code> */ public boolean getAssignSeqNum() { return assignSeqNum_; } public static final int FS_TOKEN_FIELD_NUMBER = 4; private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken fsToken_; /** * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code> */ public boolean hasFsToken() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken getFsToken() { return fsToken_ == null ?
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance() : fsToken_; } /** * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationTokenOrBuilder getFsTokenOrBuilder() { return fsToken_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance() : fsToken_; } public static final int BULK_TOKEN_FIELD_NUMBER = 5; private volatile java.lang.Object bulkToken_; /** * <code>optional string bulk_token = 5;</code> */ public boolean hasBulkToken() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional string bulk_token = 5;</code> */ public java.lang.String getBulkToken() { java.lang.Object ref = bulkToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { bulkToken_ = s; } return s; } } /** * <code>optional string bulk_token = 5;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getBulkTokenBytes() { java.lang.Object ref = bulkToken_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); bulkToken_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int COPY_FILE_FIELD_NUMBER = 6; private boolean copyFile_; /** * <code>optional bool copy_file = 6 [default = false];</code> */ public boolean hasCopyFile() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>optional bool copy_file = 6 [default = false];</code> */ public boolean getCopyFile() { return copyFile_; } private byte
memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasRegion()) { memoizedIsInitialized = 0; return false; } if (!getRegion().isInitialized()) { memoizedIsInitialized = 0; return false; } for (int i = 0; i < getFamilyPathCount(); i++) { if (!getFamilyPath(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, getRegion()); } for (int i = 0; i < familyPath_.size(); i++) { output.writeMessage(2, familyPath_.get(i)); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBool(3, assignSeqNum_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeMessage(4, getFsToken()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 5, bulkToken_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeBool(6, copyFile_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(1, getRegion()); } for (int i = 0; i < familyPath_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(2, familyPath_.get(i)); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(3, assignSeqNum_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
.computeMessageSize(4, getFsToken()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(5, bulkToken_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(6, copyFile_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest) obj; boolean result = true; result = result && (hasRegion() == other.hasRegion()); if (hasRegion()) { result = result && getRegion() .equals(other.getRegion()); } result = result && getFamilyPathList() .equals(other.getFamilyPathList()); result = result && (hasAssignSeqNum() == other.hasAssignSeqNum()); if (hasAssignSeqNum()) { result = result && (getAssignSeqNum() == other.getAssignSeqNum()); } result = result && (hasFsToken() == other.hasFsToken()); if (hasFsToken()) { result = result && getFsToken() .equals(other.getFsToken()); } result = result && (hasBulkToken() == other.hasBulkToken()); if (hasBulkToken()) { result = result && getBulkToken() .equals(other.getBulkToken()); } result = result && (hasCopyFile() == other.hasCopyFile()); if (hasCopyFile()) { result = result && (getCopyFile() == other.getCopyFile()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if
(hasRegion()) { hash = (37 * hash) + REGION_FIELD_NUMBER; hash = (53 * hash) + getRegion().hashCode(); } if (getFamilyPathCount() > 0) { hash = (37 * hash) + FAMILY_PATH_FIELD_NUMBER; hash = (53 * hash) + getFamilyPathList().hashCode(); } if (hasAssignSeqNum()) { hash = (37 * hash) + ASSIGN_SEQ_NUM_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getAssignSeqNum()); } if (hasFsToken()) { hash = (37 * hash) + FS_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getFsToken().hashCode(); } if (hasBulkToken()) { hash = (37 * hash) + BULK_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getBulkToken().hashCode(); } if (hasCopyFile()) { hash = (37 * hash) + COPY_FILE_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getCopyFile()); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( byte[]
data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> ** * Atomically bulk load multiple HFiles (say from different column families) * into an open region.
* </pre> * * Protobuf type {@code hbase.pb.BulkLoadHFileRequest} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.BulkLoadHFileRequest) org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequestOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileRequest_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getRegionFieldBuilder(); getFamilyPathFieldBuilder(); getFsTokenFieldBuilder(); } } public Builder clear() { super.clear(); if (regionBuilder_ == null) { region_ = null; } else { regionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (familyPathBuilder_ == null) { familyPath_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); } else { 
familyPathBuilder_.clear(); } assignSeqNum_ = false; bitField0_ = (bitField0_ & ~0x00000004); if (fsTokenBuilder_ == null) { fsToken_ = null; } else { fsTokenBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000008); bulkToken_ = ""; bitField0_ = (bitField0_ & ~0x00000010); copyFile_ = false; bitField0_ = (bitField0_ & ~0x00000020); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileRequest_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (regionBuilder_ == null) { result.region_ = region_; } else { result.region_ = regionBuilder_.build(); } if (familyPathBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002)) { familyPath_ = java.util.Collections.unmodifiableList(familyPath_); bitField0_ = (bitField0_ & ~0x00000002); } result.familyPath_ = familyPath_; } else { result.familyPath_ = familyPathBuilder_.build(); } if (((from_bitField0_ & 
0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000002; } result.assignSeqNum_ = assignSeqNum_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000004; } if (fsTokenBuilder_ == null) { result.fsToken_ = fsToken_; } else { result.fsToken_ = fsTokenBuilder_.build(); } if (((from_bitField0_ & 0x00000010) == 0x00000010)) { to_bitField0_ |= 0x00000008; } result.bulkToken_ = bulkToken_; if (((from_bitField0_ & 0x00000020) == 0x00000020)) { to_bitField0_ |= 0x00000010; } result.copyFile_ = copyFile_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest)other); } else { super.mergeFrom(other); return this; } } public Builder 
mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance()) return this; if (other.hasRegion()) { mergeRegion(other.getRegion()); } if (familyPathBuilder_ == null) { if (!other.familyPath_.isEmpty()) { if (familyPath_.isEmpty()) { familyPath_ = other.familyPath_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureFamilyPathIsMutable(); familyPath_.addAll(other.familyPath_); } onChanged(); } } else { if (!other.familyPath_.isEmpty()) { if (familyPathBuilder_.isEmpty()) { familyPathBuilder_.dispose(); familyPathBuilder_ = null; familyPath_ = other.familyPath_; bitField0_ = (bitField0_ & ~0x00000002); familyPathBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getFamilyPathFieldBuilder() : null; } else { familyPathBuilder_.addAllMessages(other.familyPath_); } } } if (other.hasAssignSeqNum()) { setAssignSeqNum(other.getAssignSeqNum()); } if (other.hasFsToken()) { mergeFsToken(other.getFsToken()); } if (other.hasBulkToken()) { bitField0_ |= 0x00000010; bulkToken_ = other.bulkToken_; onChanged(); } if (other.hasCopyFile()) { setCopyFile(other.getCopyFile()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasRegion()) { return false; } if (!getRegion().isInitialized()) { return false; } for (int i = 0; i < getFamilyPathCount(); i++) { if (!getFamilyPath(i).isInitialized()) { return false; } } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, 
extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { return region_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } else { return regionBuilder_.getMessage(); } } /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ public Builder setRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } region_ = value; onChanged(); } else { regionBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ public Builder setRegion( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { if (regionBuilder_ == null) { region_ = builderForValue.build(); onChanged(); } else { regionBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ public Builder mergeRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && region_ != null && region_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); } else { region_ = value; } onChanged(); } else { regionBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ public Builder clearRegion() { if (regionBuilder_ == null) { region_ = null; onChanged(); } else { regionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder 
getRegionBuilder() { bitField0_ |= 0x00000001; onChanged(); return getRegionFieldBuilder().getBuilder(); } /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); } else { return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } } /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { if (regionBuilder_ == null) { regionBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( getRegion(), getParentForChildren(), isClean()); region_ = null; } return regionBuilder_; } private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> familyPath_ = java.util.Collections.emptyList(); private void ensureFamilyPathIsMutable() { if (!((bitField0_ & 0x00000002) == 0x00000002)) { familyPath_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath>(familyPath_); bitField0_ |= 0x00000002; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> familyPathBuilder_; /** * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> getFamilyPathList() { if (familyPathBuilder_ == null) { return java.util.Collections.unmodifiableList(familyPath_); } else { return familyPathBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code> */ public int getFamilyPathCount() { if (familyPathBuilder_ == null) { return familyPath_.size(); } else { return familyPathBuilder_.getCount(); } } /** * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index) { if (familyPathBuilder_ == null) { return familyPath_.get(index); } else { return familyPathBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code> */ public Builder setFamilyPath( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) { if (familyPathBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFamilyPathIsMutable(); familyPath_.set(index, value); onChanged(); } else { familyPathBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code> */ public Builder setFamilyPath( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder 
builderForValue) { if (familyPathBuilder_ == null) { ensureFamilyPathIsMutable(); familyPath_.set(index, builderForValue.build()); onChanged(); } else { familyPathBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code> */ public Builder addFamilyPath(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) { if (familyPathBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFamilyPathIsMutable(); familyPath_.add(value); onChanged(); } else { familyPathBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code> */ public Builder addFamilyPath( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) { if (familyPathBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFamilyPathIsMutable(); familyPath_.add(index, value); onChanged(); } else { familyPathBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code> */ public Builder addFamilyPath( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) { if (familyPathBuilder_ == null) { ensureFamilyPathIsMutable(); familyPath_.add(builderForValue.build()); onChanged(); } else { familyPathBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code> */ public Builder addFamilyPath( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) { if (familyPathBuilder_ == null) { ensureFamilyPathIsMutable(); familyPath_.add(index, builderForValue.build()); onChanged(); } else { 
familyPathBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code> */ public Builder addAllFamilyPath( java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> values) { if (familyPathBuilder_ == null) { ensureFamilyPathIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, familyPath_); onChanged(); } else { familyPathBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code> */ public Builder clearFamilyPath() { if (familyPathBuilder_ == null) { familyPath_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { familyPathBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code> */ public Builder removeFamilyPath(int index) { if (familyPathBuilder_ == null) { ensureFamilyPathIsMutable(); familyPath_.remove(index); onChanged(); } else { familyPathBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder getFamilyPathBuilder( int index) { return getFamilyPathFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder( int index) { if (familyPathBuilder_ == null) { return familyPath_.get(index); } else { return familyPathBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> getFamilyPathOrBuilderList() { if (familyPathBuilder_ != null) { return familyPathBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(familyPath_); } } /** * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder addFamilyPathBuilder() { return getFamilyPathFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance()); } /** * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder addFamilyPathBuilder( int index) { return getFamilyPathFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance()); } /** * <code>repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder> getFamilyPathBuilderList() { return getFamilyPathFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> getFamilyPathFieldBuilder() { if (familyPathBuilder_ == null) { familyPathBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder>( familyPath_, ((bitField0_ & 0x00000002) == 0x00000002), getParentForChildren(), isClean()); familyPath_ = null; } return familyPathBuilder_; } private boolean assignSeqNum_ ; /** * <code>optional bool assign_seq_num = 3;</code> */ public boolean hasAssignSeqNum() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional bool assign_seq_num = 3;</code> */ public boolean getAssignSeqNum() { return assignSeqNum_; } /** * <code>optional bool assign_seq_num = 3;</code> */ public Builder setAssignSeqNum(boolean value) { bitField0_ |= 0x00000004; assignSeqNum_ = value; onChanged(); return this; } /** * <code>optional bool assign_seq_num = 3;</code> */ public Builder clearAssignSeqNum() { bitField0_ = (bitField0_ & ~0x00000004); assignSeqNum_ = false; onChanged(); return this; } private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken fsToken_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationTokenOrBuilder> fsTokenBuilder_; /** * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code> */ public boolean hasFsToken() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken getFsToken() { if (fsTokenBuilder_ == null) { return fsToken_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance() : fsToken_; } else { return fsTokenBuilder_.getMessage(); } } /** * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code> */ public Builder setFsToken(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken value) { if (fsTokenBuilder_ == null) { if (value == null) { throw new NullPointerException(); } fsToken_ = value; onChanged(); } else { fsTokenBuilder_.setMessage(value); } bitField0_ |= 0x00000008; return this; } /** * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code> */ public Builder setFsToken( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken.Builder builderForValue) { if (fsTokenBuilder_ == null) { fsToken_ = builderForValue.build(); onChanged(); } else { fsTokenBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000008; return this; } /** * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code> */ public Builder mergeFsToken(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken value) { if (fsTokenBuilder_ == null) { if (((bitField0_ & 0x00000008) == 0x00000008) && fsToken_ != null && fsToken_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance()) { fsToken_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken.newBuilder(fsToken_).mergeFrom(value).buildPartial(); } else { fsToken_ = value; } onChanged(); } else { fsTokenBuilder_.mergeFrom(value); } bitField0_ |= 0x00000008; return this; } /** * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code> */ public Builder clearFsToken() { if (fsTokenBuilder_ == null) { fsToken_ = null; onChanged(); } else { fsTokenBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000008); return this; } /** * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code> */ public 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken.Builder getFsTokenBuilder() { bitField0_ |= 0x00000008; onChanged(); return getFsTokenFieldBuilder().getBuilder(); } /** * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationTokenOrBuilder getFsTokenOrBuilder() { if (fsTokenBuilder_ != null) { return fsTokenBuilder_.getMessageOrBuilder(); } else { return fsToken_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance() : fsToken_; } } /** * <code>optional .hbase.pb.DelegationToken fs_token = 4;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationTokenOrBuilder> getFsTokenFieldBuilder() { if (fsTokenBuilder_ == null) { fsTokenBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationTokenOrBuilder>( getFsToken(), getParentForChildren(), isClean()); fsToken_ = null; } return fsTokenBuilder_; } private java.lang.Object bulkToken_ = ""; /** * <code>optional string bulk_token = 5;</code> */ public boolean hasBulkToken() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>optional string bulk_token = 5;</code> */ public java.lang.String getBulkToken() { java.lang.Object ref = bulkToken_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = 
(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { bulkToken_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>optional string bulk_token = 5;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getBulkTokenBytes() { java.lang.Object ref = bulkToken_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); bulkToken_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <code>optional string bulk_token = 5;</code> */ public Builder setBulkToken( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000010; bulkToken_ = value; onChanged(); return this; } /** * <code>optional string bulk_token = 5;</code> */ public Builder clearBulkToken() { bitField0_ = (bitField0_ & ~0x00000010); bulkToken_ = getDefaultInstance().getBulkToken(); onChanged(); return this; } /** * <code>optional string bulk_token = 5;</code> */ public Builder setBulkTokenBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000010; bulkToken_ = value; onChanged(); return this; } private boolean copyFile_ ; /** * <code>optional bool copy_file = 6 [default = false];</code> */ public boolean hasCopyFile() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <code>optional bool copy_file = 6 [default = false];</code> */ public boolean getCopyFile() { return copyFile_; } /** * <code>optional bool copy_file = 6 [default = false];</code> */ public Builder setCopyFile(boolean value) { bitField0_ |= 0x00000020; copyFile_ = value; onChanged(); return this; } /** * <code>optional bool copy_file = 6 [default = false];</code> */ public 
Builder clearCopyFile() { bitField0_ = (bitField0_ & ~0x00000020); copyFile_ = false; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.BulkLoadHFileRequest) } // @@protoc_insertion_point(class_scope:hbase.pb.BulkLoadHFileRequest) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<BulkLoadHFileRequest> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<BulkLoadHFileRequest>() { public BulkLoadHFileRequest parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new BulkLoadHFileRequest(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<BulkLoadHFileRequest> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<BulkLoadHFileRequest> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest getDefaultInstanceForType() { return 
// NOTE(review): protoc-generated code ("DO NOT EDIT" per the file header).
// Comments in this section are navigational only — to change behavior, edit
// Client.proto and regenerate with the protocol buffer compiler.
// Closes BulkLoadHFileRequest.getDefaultInstanceForType() and then the
// BulkLoadHFileRequest class begun earlier in the file.
        DEFAULT_INSTANCE;
    }
  }

  /**
   * Read-side accessor contract for {@code hbase.pb.BulkLoadHFileResponse}:
   * a single required field, {@code bool loaded = 1}.
   */
  public interface BulkLoadHFileResponseOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hbase.pb.BulkLoadHFileResponse)
      org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {

    /**
     * <code>required bool loaded = 1;</code>
     */
    boolean hasLoaded();
    /**
     * <code>required bool loaded = 1;</code>
     */
    boolean getLoaded();
  }
  /**
   * Protobuf type {@code hbase.pb.BulkLoadHFileResponse}
   */
  public static final class BulkLoadHFileResponse extends
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hbase.pb.BulkLoadHFileResponse)
      BulkLoadHFileResponseOrBuilder {
    // Use BulkLoadHFileResponse.newBuilder() to construct.
    private BulkLoadHFileResponse(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    // No-arg ctor backs DEFAULT_INSTANCE; the required field defaults to false
    // (hasLoaded() stays false until the bit is set during parsing/building).
    private BulkLoadHFileResponse() {
      loaded_ = false;
    }

    @java.lang.Override
    public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
    getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing ctor: tag 8 is field 1 (bool loaded); tag 0 ends the
    // stream; anything else is preserved in unknownFields for round-tripping.
    private BulkLoadHFileResponse(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      this();
      int mutable_bitField0_ = 0;
      org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              bitField0_ |= 0x00000001;
              loaded_ = input.readBool();
              break;
            }
          }
        }
      } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
        // Attach the partially-parsed message so callers can inspect it.
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
            e).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileResponse_descriptor;
    }

    protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse.Builder.class);
    }

    private int bitField0_;
    public static final int LOADED_FIELD_NUMBER = 1;
    private boolean loaded_;
    /**
     * <code>required bool loaded = 1;</code>
     */
    public boolean hasLoaded() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required bool loaded = 1;</code>
     */
    public boolean getLoaded() {
      return loaded_;
    }

    // Memoized tri-state: -1 = not computed, 0 = missing required field, 1 = ok.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (!hasLoaded()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBool(1, loaded_);
      }
      unknownFields.writeTo(output);
    }

    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
          .computeBoolSize(1, loaded_);
      }
      size += unknownFields.getSerializedSize();
      memoizedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse) obj;

      boolean result = true;
      result = result && (hasLoaded() == other.hasLoaded());
      if (hasLoaded()) {
        result = result && (getLoaded()
            == other.getLoaded());
      }
      result = result && unknownFields.equals(other.unknownFields);
      return result;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasLoaded()) {
        hash = (37 * hash) + LOADED_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean(
            getLoaded());
      }
      hash = (29 * hash) + unknownFields.hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Standard generated parseFrom overloads: ByteString / byte[] /
    // InputStream / length-delimited InputStream / CodedInputStream, each with
    // and without an ExtensionRegistryLite.
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(byte[] data)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(
        byte[] data,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.BulkLoadHFileResponse}
     */
    public static final class Builder extends
        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hbase.pb.BulkLoadHFileResponse)
        org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponseOrBuilder {
      public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileResponse_descriptor;
      }

      protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No message-type fields here, so nothing to eagerly initialize.
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
        }
      }
      public Builder clear() {
        super.clear();
        loaded_ = false;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileResponse_descriptor;
      }

      public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance();
      }

      // build() enforces the required field; buildPartial() does not.
      public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse build() {
        org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse buildPartial() {
        org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.loaded_ = loaded_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder clone() {
        return (Builder) super.clone();
      }
      public Builder setField(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
          Object value) {
        return (Builder) super.setField(field, value);
      }
      public Builder clearField(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
        return (Builder) super.clearField(field);
      }
      public Builder clearOneof(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
        return (Builder) super.clearOneof(oneof);
      }
      public Builder setRepeatedField(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
          int index, Object value) {
        return (Builder) super.setRepeatedField(field, index, value);
      }
      public Builder addRepeatedField(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
          Object value) {
        return (Builder) super.addRepeatedField(field, value);
      }
      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse) {
          return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse other) {
        if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance()) return this;
        if (other.hasLoaded()) {
          setLoaded(other.getLoaded());
        }
        this.mergeUnknownFields(other.unknownFields);
        onChanged();
        return this;
      }

      public final boolean isInitialized() {
        if (!hasLoaded()) {
          return false;
        }
        return true;
      }

      public Builder mergeFrom(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
          org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse) e.getUnfinishedMessage();
          throw e.unwrapIOException();
        } finally {
          // Merge whatever was parsed before a failure, matching protobuf
          // partial-parse semantics.
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      private boolean loaded_ ;
      /**
       * <code>required bool loaded = 1;</code>
       */
      public boolean hasLoaded() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required bool loaded = 1;</code>
       */
      public boolean getLoaded() {
        return loaded_;
      }
      /**
       * <code>required bool loaded = 1;</code>
       */
      public Builder setLoaded(boolean value) {
        bitField0_ |= 0x00000001;
        loaded_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required bool loaded = 1;</code>
       */
      public Builder clearLoaded() {
        bitField0_ = (bitField0_ & ~0x00000001);
        loaded_ = false;
        onChanged();
        return this;
      }
      public final Builder setUnknownFields(
          final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      public final Builder mergeUnknownFields(
          final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hbase.pb.BulkLoadHFileResponse)
    }

    // @@protoc_insertion_point(class_scope:hbase.pb.BulkLoadHFileResponse)
    private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse();
    }

    public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    // Deprecated in generated code in favor of parser(); kept public for
    // backward compatibility.
    @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<BulkLoadHFileResponse>
        PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<BulkLoadHFileResponse>() {
      public BulkLoadHFileResponse parsePartialFrom(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
          org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
          return new BulkLoadHFileResponse(input, extensionRegistry);
      }
    };

    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<BulkLoadHFileResponse> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<BulkLoadHFileResponse> getParserForType() {
      return PARSER;
    }

    public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  /**
   * Accessor contract for {@code hbase.pb.DelegationToken}: four optional
   * fields — bytes identifier = 1, bytes password = 2, string kind = 3,
   * string service = 4 (string fields also expose raw-bytes accessors).
   */
  public interface DelegationTokenOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hbase.pb.DelegationToken)
      org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {

    /**
     * <code>optional bytes identifier = 1;</code>
     */
    boolean hasIdentifier();
    /**
     * <code>optional bytes identifier = 1;</code>
     */
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getIdentifier();

    /**
     * <code>optional bytes password = 2;</code>
     */
    boolean hasPassword();
    /**
     * <code>optional bytes password = 2;</code>
     */
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getPassword();

    /**
     * <code>optional string kind = 3;</code>
     */
    boolean hasKind();
    /**
     * <code>optional string kind = 3;</code>
     */
    java.lang.String getKind();
    /**
     * <code>optional string kind = 3;</code>
     */
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
        getKindBytes();

    /**
     * <code>optional string service = 4;</code>
     */
    boolean hasService();
    /**
     * <code>optional string service = 4;</code>
     */
    java.lang.String getService();
    /**
     * <code>optional string service = 4;</code>
     */
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
        getServiceBytes();
  }
  /**
   * Protobuf type {@code hbase.pb.DelegationToken}
   */
  public static final class DelegationToken extends
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hbase.pb.DelegationToken)
      DelegationTokenOrBuilder {
    // Use DelegationToken.newBuilder() to construct.
private DelegationToken(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private DelegationToken() { identifier_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; password_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; kind_ = ""; service_ = ""; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private DelegationToken( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; identifier_ = input.readBytes(); break; } case 18: { bitField0_ |= 0x00000002; password_ = input.readBytes(); break; } case 26: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000004; kind_ = bs; break; } case 34: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000008; service_ = bs; break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = 
unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_DelegationToken_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_DelegationToken_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken.Builder.class); } private int bitField0_; public static final int IDENTIFIER_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString identifier_; /** * <code>optional bytes identifier = 1;</code> */ public boolean hasIdentifier() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional bytes identifier = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getIdentifier() { return identifier_; } public static final int PASSWORD_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString password_; /** * <code>optional bytes password = 2;</code> */ public boolean hasPassword() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional bytes password = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getPassword() { return password_; } public static final int KIND_FIELD_NUMBER = 3; private volatile java.lang.Object kind_; /** * <code>optional string kind = 3;</code> */ public boolean hasKind() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional string kind = 3;</code> */ public java.lang.String getKind() { java.lang.Object ref = kind_; if (ref 
instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { kind_ = s; } return s; } } /** * <code>optional string kind = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getKindBytes() { java.lang.Object ref = kind_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); kind_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int SERVICE_FIELD_NUMBER = 4; private volatile java.lang.Object service_; /** * <code>optional string service = 4;</code> */ public boolean hasService() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional string service = 4;</code> */ public java.lang.String getService() { java.lang.Object ref = service_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { service_ = s; } return s; } } /** * <code>optional string service = 4;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getServiceBytes() { java.lang.Object ref = service_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); service_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; public final boolean 
isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, identifier_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, password_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 3, kind_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 4, service_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(1, identifier_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(2, password_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(3, kind_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(4, service_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken 
other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken) obj; boolean result = true; result = result && (hasIdentifier() == other.hasIdentifier()); if (hasIdentifier()) { result = result && getIdentifier() .equals(other.getIdentifier()); } result = result && (hasPassword() == other.hasPassword()); if (hasPassword()) { result = result && getPassword() .equals(other.getPassword()); } result = result && (hasKind() == other.hasKind()); if (hasKind()) { result = result && getKind() .equals(other.getKind()); } result = result && (hasService() == other.hasService()); if (hasService()) { result = result && getService() .equals(other.getService()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasIdentifier()) { hash = (37 * hash) + IDENTIFIER_FIELD_NUMBER; hash = (53 * hash) + getIdentifier().hashCode(); } if (hasPassword()) { hash = (37 * hash) + PASSWORD_FIELD_NUMBER; hash = (53 * hash) + getPassword().hashCode(); } if (hasKind()) { hash = (37 * hash) + KIND_FIELD_NUMBER; hash = (53 * hash) + getKind().hashCode(); } if (hasService()) { hash = (37 * hash) + SERVICE_FIELD_NUMBER; hash = (53 * hash) + getService().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
// NOTE(review): protoc-generated ("DO NOT EDIT"); annotations only, code tokens untouched.
// hbase.pb.DelegationToken.Builder — mutable builder over four optional fields
// (identifier=1 bytes, password=2 bytes, kind=3 string, service=4 string). Presence is
// tracked in bitField0_ with one bit per field (0x1, 0x2, 0x4, 0x8), matching proto2
// has-bit semantics. clear() resets each field to its default and drops its has-bit;
// buildPartial() copies only has-bit-set flags into the message's own bitField0_.
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.DelegationToken} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.DelegationToken) org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationTokenOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_DelegationToken_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_DelegationToken_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); identifier_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); password_ = 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); kind_ = ""; bitField0_ = (bitField0_ & ~0x00000004); service_ = ""; bitField0_ = (bitField0_ & ~0x00000008); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_DelegationToken_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.identifier_ = identifier_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.password_ = password_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.kind_ = kind_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; } result.service_ = service_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( 
// Reflective set/clear/merge delegates simply narrow the superclass return type to
// Builder. mergeFrom(DelegationToken) copies only fields whose has-bit is set on the
// other message; for the string fields it shares the other message's backing Object
// (String or ByteString) rather than copying.
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance()) return this; if (other.hasIdentifier()) { setIdentifier(other.getIdentifier()); } if (other.hasPassword()) { setPassword(other.getPassword()); } if (other.hasKind()) { bitField0_ |= 0x00000004; kind_ = other.kind_; onChanged(); } if (other.hasService()) { bitField0_ |= 0x00000008; service_ = other.service_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream 
// NOTE(review): protoc-generated ("DO NOT EDIT"); annotations only, code tokens untouched.
// mergeFrom(CodedInputStream): parses a full message via PARSER and merges it in; on a
// wire error the partially-parsed message is still merged (finally block) before the
// cause is rethrown unwrapped as IOException. Below: per-field builder accessors.
// setIdentifier/setPassword reject null explicitly; clearX restores the message-level
// default via getDefaultInstance() and drops the has-bit.
input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString identifier_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>optional bytes identifier = 1;</code> */ public boolean hasIdentifier() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional bytes identifier = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getIdentifier() { return identifier_; } /** * <code>optional bytes identifier = 1;</code> */ public Builder setIdentifier(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; identifier_ = value; onChanged(); return this; } /** * <code>optional bytes identifier = 1;</code> */ public Builder clearIdentifier() { bitField0_ = (bitField0_ & ~0x00000001); identifier_ = getDefaultInstance().getIdentifier(); onChanged(); return this; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString password_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>optional bytes password = 2;</code> */ public boolean hasPassword() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional bytes password = 2;</code> */ public 
// kind_/service_ use the generated lazy String/ByteString dual representation: the
// Object field holds either form; getKind() caches the decoded String only when the
// bytes are valid UTF-8, getKindBytes() caches the encoded ByteString.
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getPassword() { return password_; } /** * <code>optional bytes password = 2;</code> */ public Builder setPassword(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; password_ = value; onChanged(); return this; } /** * <code>optional bytes password = 2;</code> */ public Builder clearPassword() { bitField0_ = (bitField0_ & ~0x00000002); password_ = getDefaultInstance().getPassword(); onChanged(); return this; } private java.lang.Object kind_ = ""; /** * <code>optional string kind = 3;</code> */ public boolean hasKind() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional string kind = 3;</code> */ public java.lang.String getKind() { java.lang.Object ref = kind_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { kind_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>optional string kind = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getKindBytes() { java.lang.Object ref = kind_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); kind_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <code>optional string kind = 3;</code> */ public Builder setKind( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; kind_ = value; onChanged(); return this; } /** * <code>optional string kind = 3;</code> */ public Builder clearKind() { bitField0_ = (bitField0_ & ~0x00000004); kind_ = 
getDefaultInstance().getKind(); onChanged(); return this; } /** * <code>optional string kind = 3;</code> */ public Builder setKindBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; kind_ = value; onChanged(); return this; } private java.lang.Object service_ = ""; /** * <code>optional string service = 4;</code> */ public boolean hasService() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional string service = 4;</code> */ public java.lang.String getService() { java.lang.Object ref = service_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { service_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>optional string service = 4;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getServiceBytes() { java.lang.Object ref = service_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); service_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <code>optional string service = 4;</code> */ public Builder setService( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000008; service_ = value; onChanged(); return this; } /** * <code>optional string service = 4;</code> */ public Builder clearService() { bitField0_ = (bitField0_ & ~0x00000008); service_ = getDefaultInstance().getService(); onChanged(); return this; } /** * <code>optional string service = 4;</code> */ public Builder setServiceBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString 
// End of Builder; then the eagerly-created DEFAULT_INSTANCE and the deprecated public
// PARSER field (use parser() instead, per later protobuf codegen convention).
value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000008; service_ = value; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.DelegationToken) } // @@protoc_insertion_point(class_scope:hbase.pb.DelegationToken) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<DelegationToken> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<DelegationToken>() { public DelegationToken parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new DelegationToken(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<DelegationToken> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<DelegationToken> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public 
// NOTE(review): protoc-generated ("DO NOT EDIT"); annotations only, code tokens untouched.
// hbase.pb.PrepareBulkLoadRequest read-only accessor interface: required TableName
// table_name = 1 and optional RegionSpecifier region = 2, each with has/get/getOrBuilder.
// Implemented by both the immutable message (which follows) and its Builder.
interface PrepareBulkLoadRequestOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.PrepareBulkLoadRequest) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ boolean hasTableName(); /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName(); /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); /** * <code>optional .hbase.pb.RegionSpecifier region = 2;</code> */ boolean hasRegion(); /** * <code>optional .hbase.pb.RegionSpecifier region = 2;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); /** * <code>optional .hbase.pb.RegionSpecifier region = 2;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); } /** * Protobuf type {@code hbase.pb.PrepareBulkLoadRequest} */ public static final class PrepareBulkLoadRequest extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.PrepareBulkLoadRequest) PrepareBulkLoadRequestOrBuilder { // Use PrepareBulkLoadRequest.newBuilder() to construct. 
// NOTE(review): protoc-generated ("DO NOT EDIT"); annotations only, code tokens untouched.
// Wire-parsing constructor: reads tags in a loop (tag 0 = end of message; unknown tags
// go to the UnknownFieldSet builder; tag 10 = field 1 TableName, tag 18 = field 2
// RegionSpecifier). A repeated occurrence of a message field is merged into the
// previously parsed value via toBuilder()/mergeFrom, per proto2 last-message-merge
// semantics. IOExceptions are wrapped as InvalidProtocolBufferException with the
// partially built message attached.
private PrepareBulkLoadRequest(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private PrepareBulkLoadRequest() { } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private PrepareBulkLoadRequest( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = tableName_.toBuilder(); } tableName_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(tableName_); tableName_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 18: { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; if (((bitField0_ & 0x00000002) == 0x00000002)) { subBuilder = region_.toBuilder(); } region_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(region_); region_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } } } } catch 
(org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadRequest_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.Builder.class); } private int bitField0_; public static final int TABLE_NAME_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_; /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { return tableName_ == null ? 
// isInitialized(): memoized tri-state (-1 unknown / 0 false / 1 true); required
// table_name must be set and initialized, optional region only checked when present.
// writeTo/getSerializedSize emit fields 1 and 2 guarded by their has-bits, then the
// unknown fields; size is memoized in memoizedSize.
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } public static final int REGION_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_; /** * <code>optional .hbase.pb.RegionSpecifier region = 2;</code> */ public boolean hasRegion() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional .hbase.pb.RegionSpecifier region = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } /** * <code>optional .hbase.pb.RegionSpecifier region = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasTableName()) { memoizedIsInitialized = 0; return false; } if (!getTableName().isInitialized()) { memoizedIsInitialized = 0; return false; } if (hasRegion()) { if (!getRegion().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, getTableName()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeMessage(2, getRegion()); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 
// equals(): field-presence plus per-field equality plus unknown-field equality.
// hashCode(): memoized; mixes descriptor hash and each present field keyed by its
// field number, using the generator's fixed 41/19/37/53/29 multipliers.
0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(1, getTableName()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(2, getRegion()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest) obj; boolean result = true; result = result && (hasTableName() == other.hasTableName()); if (hasTableName()) { result = result && getTableName() .equals(other.getTableName()); } result = result && (hasRegion() == other.hasRegion()); if (hasRegion()) { result = result && getRegion() .equals(other.getRegion()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasTableName()) { hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER; hash = (53 * hash) + getTableName().hashCode(); } if (hasRegion()) { hash = (37 * hash) + REGION_FIELD_NUMBER; hash = (53 * hash) + getRegion().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return 
// NOTE(review): protoc-generated ("DO NOT EDIT"); annotations only, code tokens untouched.
// Static parse family for hbase.pb.PrepareBulkLoadRequest — identical in shape to the
// DelegationToken one above: all overloads delegate to PARSER, stream variants go
// through GeneratedMessageV3 IOException-translating helpers, and newBuilder()/
// toBuilder() hang off DEFAULT_INSTANCE.
PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 
.parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
// NOTE(review): protoc-generated ("DO NOT EDIT"); annotations only, code tokens
// untouched. The Builder definition continues past the end of this chunk.
// hbase.pb.PrepareBulkLoadRequest.Builder — unlike DelegationToken's scalar-only
// builder, both fields are sub-messages managed through SingleFieldBuilderV3
// (tableNameBuilder_/regionBuilder_), which is lazily created only when
// alwaysUseFieldBuilders is set or a nested builder is requested; otherwise the plain
// message field (tableName_/region_) is used directly.
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.PrepareBulkLoadRequest} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.PrepareBulkLoadRequest) org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequestOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadRequest_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getTableNameFieldBuilder(); getRegionFieldBuilder(); } } public Builder clear() { super.clear(); if (tableNameBuilder_ == null) { 
tableName_ = null; } else { tableNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (regionBuilder_ == null) { region_ = null; } else { regionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadRequest_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (tableNameBuilder_ == null) { result.tableName_ = tableName_; } else { result.tableName_ = tableNameBuilder_.build(); } if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } if (regionBuilder_ == null) { result.region_ = region_; } else { result.region_ = regionBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( 
// Reflective delegates (narrowed return types), the typed mergeFrom (which uses
// mergeTableName/mergeRegion for sub-messages), and isInitialized mirroring the
// message's required/optional checks without memoization.
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.getDefaultInstance()) return this; if (other.hasTableName()) { mergeTableName(other.getTableName()); } if (other.hasRegion()) { mergeRegion(other.getRegion()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasTableName()) { return false; } if (!getTableName().isInitialized()) { return false; } if (hasRegion()) { if (!getRegion().isInitialized()) { return false; } } return true; } public Builder mergeFrom( 
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { if (tableNameBuilder_ == null) { return tableName_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } else { return tableNameBuilder_.getMessage(); } } /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ public Builder setTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (value == null) { throw new NullPointerException(); } tableName_ = value; onChanged(); } else { tableNameBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ public Builder setTableName( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) { if (tableNameBuilder_ == null) { tableName_ = builderForValue.build(); onChanged(); } else { tableNameBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && tableName_ != null && tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); } else { tableName_ = value; } onChanged(); } else { tableNameBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ public Builder clearTableName() { if (tableNameBuilder_ == null) { tableName_ = null; onChanged(); } else { tableNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ public 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() { bitField0_ |= 0x00000001; onChanged(); return getTableNameFieldBuilder().getBuilder(); } /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { if (tableNameBuilder_ != null) { return tableNameBuilder_.getMessageOrBuilder(); } else { return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; } } /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { tableNameBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>( getTableName(), getParentForChildren(), isClean()); tableName_ = null; } return tableNameBuilder_; } private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; /** * <code>optional .hbase.pb.RegionSpecifier region = 
2;</code> */ public boolean hasRegion() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional .hbase.pb.RegionSpecifier region = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } else { return regionBuilder_.getMessage(); } } /** * <code>optional .hbase.pb.RegionSpecifier region = 2;</code> */ public Builder setRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } region_ = value; onChanged(); } else { regionBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .hbase.pb.RegionSpecifier region = 2;</code> */ public Builder setRegion( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { if (regionBuilder_ == null) { region_ = builderForValue.build(); onChanged(); } else { regionBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .hbase.pb.RegionSpecifier region = 2;</code> */ public Builder mergeRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && region_ != null && region_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); } else { region_ = value; } onChanged(); } else { regionBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .hbase.pb.RegionSpecifier region = 2;</code> */ public Builder 
clearRegion() { if (regionBuilder_ == null) { region_ = null; onChanged(); } else { regionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } /** * <code>optional .hbase.pb.RegionSpecifier region = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { bitField0_ |= 0x00000002; onChanged(); return getRegionFieldBuilder().getBuilder(); } /** * <code>optional .hbase.pb.RegionSpecifier region = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); } else { return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } } /** * <code>optional .hbase.pb.RegionSpecifier region = 2;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { if (regionBuilder_ == null) { regionBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( getRegion(), getParentForChildren(), isClean()); region_ = null; } return regionBuilder_; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final 
org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.PrepareBulkLoadRequest) } // @@protoc_insertion_point(class_scope:hbase.pb.PrepareBulkLoadRequest) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<PrepareBulkLoadRequest> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<PrepareBulkLoadRequest>() { public PrepareBulkLoadRequest parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new PrepareBulkLoadRequest(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<PrepareBulkLoadRequest> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<PrepareBulkLoadRequest> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface PrepareBulkLoadResponseOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.PrepareBulkLoadResponse) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>required string bulk_token = 1;</code> */ boolean hasBulkToken(); /** * 
<code>required string bulk_token = 1;</code> */ java.lang.String getBulkToken(); /** * <code>required string bulk_token = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getBulkTokenBytes(); }
// NOTE(review): protoc-generated message class ("DO NOT EDIT!" per file header). Do not
// hand-modify; change Client.proto and regenerate. Comments added for navigation only.
// PrepareBulkLoadResponse carries a single required string field, bulk_token (field 1).
/** * Protobuf type {@code hbase.pb.PrepareBulkLoadResponse} */ public static final class PrepareBulkLoadResponse extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.PrepareBulkLoadResponse) PrepareBulkLoadResponseOrBuilder { // Use PrepareBulkLoadResponse.newBuilder() to construct. private PrepareBulkLoadResponse(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private PrepareBulkLoadResponse() { bulkToken_ = ""; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private PrepareBulkLoadResponse( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; bulkToken_ = bs; break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new 
org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadResponse_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.Builder.class); } private int bitField0_; public static final int BULK_TOKEN_FIELD_NUMBER = 1; private volatile java.lang.Object bulkToken_; /** * <code>required string bulk_token = 1;</code> */ public boolean hasBulkToken() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string bulk_token = 1;</code> */ public java.lang.String getBulkToken() { java.lang.Object ref = bulkToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { bulkToken_ = s; } return s; } } /** * <code>required string bulk_token = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getBulkTokenBytes() { java.lang.Object ref = bulkToken_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); bulkToken_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasBulkToken()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, bulkToken_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(1, bulkToken_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse) obj; boolean result = true; result = result && (hasBulkToken() == other.hasBulkToken()); if (hasBulkToken()) { result = result && getBulkToken() .equals(other.getBulkToken()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return 
memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasBulkToken()) { hash = (37 * hash) + BULK_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getBulkToken().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return 
DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; }
// ---- Builder for PrepareBulkLoadResponse ----
/** * Protobuf type {@code hbase.pb.PrepareBulkLoadResponse} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.PrepareBulkLoadResponse) org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponseOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadResponse_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); 
maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); bulkToken_ = ""; bitField0_ = (bitField0_ & ~0x00000001); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadResponse_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.bulkToken_ = bulkToken_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return 
(Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.getDefaultInstance()) return this; if (other.hasBulkToken()) { bitField0_ |= 0x00000001; bulkToken_ = other.bulkToken_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasBulkToken()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { 
parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object bulkToken_ = ""; /** * <code>required string bulk_token = 1;</code> */ public boolean hasBulkToken() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string bulk_token = 1;</code> */ public java.lang.String getBulkToken() { java.lang.Object ref = bulkToken_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { bulkToken_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>required string bulk_token = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getBulkTokenBytes() { java.lang.Object ref = bulkToken_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); bulkToken_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <code>required string bulk_token = 1;</code> */ public Builder setBulkToken( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; bulkToken_ = value; onChanged(); return this; } /** * <code>required string bulk_token = 1;</code> */ public Builder clearBulkToken() { bitField0_ = (bitField0_ & ~0x00000001); bulkToken_ = getDefaultInstance().getBulkToken(); onChanged(); return this; } /** * <code>required string bulk_token = 1;</code> */ public Builder setBulkTokenBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if 
(value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; bulkToken_ = value; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.PrepareBulkLoadResponse) } // @@protoc_insertion_point(class_scope:hbase.pb.PrepareBulkLoadResponse)
// Singleton default instance and deprecated static PARSER for PrepareBulkLoadResponse.
private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<PrepareBulkLoadResponse> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<PrepareBulkLoadResponse>() { public PrepareBulkLoadResponse parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new PrepareBulkLoadResponse(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<PrepareBulkLoadResponse> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<PrepareBulkLoadResponse> getParserForType() { return PARSER; } public 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
// NOTE(review): protoc-generated code ("DO NOT EDIT!" per file header). Do not hand-modify;
// change Client.proto and regenerate. Comments added for navigation only.
// CleanupBulkLoadRequest: required string bulk_token = 1, optional RegionSpecifier region = 2.
public interface CleanupBulkLoadRequestOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.CleanupBulkLoadRequest) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>required string bulk_token = 1;</code> */ boolean hasBulkToken(); /** * <code>required string bulk_token = 1;</code> */ java.lang.String getBulkToken(); /** * <code>required string bulk_token = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getBulkTokenBytes(); /** * <code>optional .hbase.pb.RegionSpecifier region = 2;</code> */ boolean hasRegion(); /** * <code>optional .hbase.pb.RegionSpecifier region = 2;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); /** * <code>optional .hbase.pb.RegionSpecifier region = 2;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); } /** * Protobuf type {@code hbase.pb.CleanupBulkLoadRequest} */ public static final class CleanupBulkLoadRequest extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.CleanupBulkLoadRequest) CleanupBulkLoadRequestOrBuilder { // Use CleanupBulkLoadRequest.newBuilder() to construct. 
private CleanupBulkLoadRequest(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CleanupBulkLoadRequest() { bulkToken_ = ""; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private CleanupBulkLoadRequest( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; bulkToken_ = bs; break; } case 18: { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; if (((bitField0_ & 0x00000002) == 0x00000002)) { subBuilder = region_.toBuilder(); } region_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(region_); region_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { 
this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CleanupBulkLoadRequest_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CleanupBulkLoadRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest.Builder.class); } private int bitField0_; public static final int BULK_TOKEN_FIELD_NUMBER = 1; private volatile java.lang.Object bulkToken_; /** * <code>required string bulk_token = 1;</code> */ public boolean hasBulkToken() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string bulk_token = 1;</code> */ public java.lang.String getBulkToken() { java.lang.Object ref = bulkToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { bulkToken_ = s; } return s; } } /** * <code>required string bulk_token = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getBulkTokenBytes() { java.lang.Object ref = bulkToken_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); bulkToken_ = b; return b; } else { return 
(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int REGION_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_; /** * <code>optional .hbase.pb.RegionSpecifier region = 2;</code> */ public boolean hasRegion() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional .hbase.pb.RegionSpecifier region = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } /** * <code>optional .hbase.pb.RegionSpecifier region = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasBulkToken()) { memoizedIsInitialized = 0; return false; } if (hasRegion()) { if (!getRegion().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, bulkToken_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeMessage(2, getRegion()); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += 
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(1, bulkToken_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(2, getRegion()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest) obj; boolean result = true; result = result && (hasBulkToken() == other.hasBulkToken()); if (hasBulkToken()) { result = result && getBulkToken() .equals(other.getBulkToken()); } result = result && (hasRegion() == other.hasRegion()); if (hasRegion()) { result = result && getRegion() .equals(other.getRegion()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasBulkToken()) { hash = (37 * hash) + BULK_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getBulkToken().hashCode(); } if (hasRegion()) { hash = (37 * hash) + REGION_FIELD_NUMBER; hash = (53 * hash) + getRegion().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } 
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } 
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.CleanupBulkLoadRequest} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.CleanupBulkLoadRequest) org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequestOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CleanupBulkLoadRequest_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CleanupBulkLoadRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getRegionFieldBuilder(); } } public Builder clear() { super.clear(); bulkToken_ = ""; bitField0_ = (bitField0_ & ~0x00000001); if 
(regionBuilder_ == null) { region_ = null; } else { regionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CleanupBulkLoadRequest_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.bulkToken_ = bulkToken_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } if (regionBuilder_ == null) { result.region_ = region_; } else { result.region_ = regionBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor 
field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest.getDefaultInstance()) return this; if (other.hasBulkToken()) { bitField0_ |= 0x00000001; bulkToken_ = other.bulkToken_; onChanged(); } if (other.hasRegion()) { mergeRegion(other.getRegion()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasBulkToken()) { return false; } if (hasRegion()) { if (!getRegion().isInitialized()) { return false; } } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parsedMessage = null; try { parsedMessage = 
PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object bulkToken_ = ""; /** * <code>required string bulk_token = 1;</code> */ public boolean hasBulkToken() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required string bulk_token = 1;</code> */ public java.lang.String getBulkToken() { java.lang.Object ref = bulkToken_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { bulkToken_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>required string bulk_token = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getBulkTokenBytes() { java.lang.Object ref = bulkToken_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); bulkToken_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <code>required string bulk_token = 1;</code> */ public Builder setBulkToken( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; bulkToken_ = value; onChanged(); return this; } /** * <code>required string bulk_token = 1;</code> */ public Builder clearBulkToken() { bitField0_ = (bitField0_ & ~0x00000001); bulkToken_ = getDefaultInstance().getBulkToken(); onChanged(); return this; } /** * <code>required 
string bulk_token = 1;</code> */ public Builder setBulkTokenBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; bulkToken_ = value; onChanged(); return this; } private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; /** * <code>optional .hbase.pb.RegionSpecifier region = 2;</code> */ public boolean hasRegion() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional .hbase.pb.RegionSpecifier region = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { return region_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } else { return regionBuilder_.getMessage(); } } /** * <code>optional .hbase.pb.RegionSpecifier region = 2;</code> */ public Builder setRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } region_ = value; onChanged(); } else { regionBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .hbase.pb.RegionSpecifier region = 2;</code> */ public Builder setRegion( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { if (regionBuilder_ == null) { region_ = builderForValue.build(); onChanged(); } else { regionBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .hbase.pb.RegionSpecifier region = 2;</code> */ public Builder mergeRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && region_ != null && region_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); } else { region_ = value; } onChanged(); } else { regionBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .hbase.pb.RegionSpecifier region = 2;</code> */ public Builder clearRegion() { if (regionBuilder_ == null) { region_ = null; onChanged(); } else { regionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } /** * <code>optional .hbase.pb.RegionSpecifier region = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder 
getRegionBuilder() { bitField0_ |= 0x00000002; onChanged(); return getRegionFieldBuilder().getBuilder(); } /** * <code>optional .hbase.pb.RegionSpecifier region = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); } else { return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } } /** * <code>optional .hbase.pb.RegionSpecifier region = 2;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { if (regionBuilder_ == null) { regionBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( getRegion(), getParentForChildren(), isClean()); region_ = null; } return regionBuilder_; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.CleanupBulkLoadRequest) } // @@protoc_insertion_point(class_scope:hbase.pb.CleanupBulkLoadRequest) private static final 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<CleanupBulkLoadRequest> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<CleanupBulkLoadRequest>() { public CleanupBulkLoadRequest parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new CleanupBulkLoadRequest(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<CleanupBulkLoadRequest> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<CleanupBulkLoadRequest> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface CleanupBulkLoadResponseOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.CleanupBulkLoadResponse) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.CleanupBulkLoadResponse} */ public static final class CleanupBulkLoadResponse extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.CleanupBulkLoadResponse) CleanupBulkLoadResponseOrBuilder { // Use CleanupBulkLoadResponse.newBuilder() to construct. 
private CleanupBulkLoadResponse(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CleanupBulkLoadResponse() { } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private CleanupBulkLoadResponse( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CleanupBulkLoadResponse_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CleanupBulkLoadResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( 
// NOTE(review): protoc-generated code — the file header says "DO NOT EDIT". Any manual
// change here is lost on regeneration; fix Client.proto and re-run protoc instead. The
// comments in this block are review annotations only; the code bytes are untouched.
// CleanupBulkLoadResponse declares no fields, so isInitialized() is trivially true and
// serialization (writeTo/getSerializedSize) consists solely of unknownFields; equals()
// and hashCode() likewise depend only on unknownFields and the descriptor.
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.Builder.class); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse) obj; boolean result = true; result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
// The parseFrom overloads below all delegate to the singleton PARSER or to
// GeneratedMessageV3 IO helpers; they differ only in input type (ByteString, byte[],
// InputStream) and in whether an ExtensionRegistryLite is supplied.
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public 
// (continued) delimited and CodedInputStream parse variants, plus the newBuilder /
// toBuilder factory methods (toBuilder returns a fresh Builder for the shared
// DEFAULT_INSTANCE, otherwise a Builder pre-merged with this message).
static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
// NOTE(review): protoc-generated code — do not hand-edit; regenerate from Client.proto.
// The Builder below is the standard generated builder for the field-less
// CleanupBulkLoadResponse message: clear() only resets superclass state, since there are
// no fields of its own to reset.
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.CleanupBulkLoadResponse} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.CleanupBulkLoadResponse) org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponseOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CleanupBulkLoadResponse_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CleanupBulkLoadResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); return this; } public 
// build() validates via isInitialized() (always true here) and buildPartial() constructs
// the immutable message; no field copying is needed for this empty message.
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CleanupBulkLoadResponse_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse(this); onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor 
// mergeFrom(Message) narrows to the generated type when possible; the typed
// mergeFrom(CleanupBulkLoadResponse) only merges unknown fields (no declared fields).
// The stream mergeFrom preserves a partially-parsed message on failure before rethrowing.
field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // 
@@protoc_insertion_point(builder_scope:hbase.pb.CleanupBulkLoadResponse) } // @@protoc_insertion_point(class_scope:hbase.pb.CleanupBulkLoadResponse) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<CleanupBulkLoadResponse> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<CleanupBulkLoadResponse>() { public CleanupBulkLoadResponse parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new CleanupBulkLoadResponse(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<CleanupBulkLoadResponse> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<CleanupBulkLoadResponse> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface CoprocessorServiceCallOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.CoprocessorServiceCall) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>required bytes row = 1;</code> */ boolean hasRow(); /** * <code>required bytes row = 1;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getRow(); /** * <code>required string 
service_name = 2;</code> */ boolean hasServiceName(); /** * <code>required string service_name = 2;</code> */ java.lang.String getServiceName(); /** * <code>required string service_name = 2;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getServiceNameBytes(); /** * <code>required string method_name = 3;</code> */ boolean hasMethodName(); /** * <code>required string method_name = 3;</code> */ java.lang.String getMethodName(); /** * <code>required string method_name = 3;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getMethodNameBytes(); /** * <code>required bytes request = 4;</code> */ boolean hasRequest(); /** * <code>required bytes request = 4;</code> */ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getRequest(); } /** * Protobuf type {@code hbase.pb.CoprocessorServiceCall} */ public static final class CoprocessorServiceCall extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.CoprocessorServiceCall) CoprocessorServiceCallOrBuilder { // Use CoprocessorServiceCall.newBuilder() to construct. 
// NOTE(review): protoc-generated code — do not hand-edit; regenerate from Client.proto.
// CoprocessorServiceCall carries four required fields (row=1, service_name=2,
// method_name=3, request=4). The parsing constructor below reads a CodedInputStream
// tag-by-tag: wire tags 10/18/26/34 are the four fields; any other tag is preserved in
// unknownFields; tag 0 terminates the loop.
private CoprocessorServiceCall(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CoprocessorServiceCall() { row_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; serviceName_ = ""; methodName_ = ""; request_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private CoprocessorServiceCall( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; row_ = input.readBytes(); break; } case 18: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; serviceName_ = bs; break; } case 26: { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000004; methodName_ = bs; break; } case 34: { bitField0_ |= 0x00000008; request_ = input.readBytes(); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { 
// Descriptor plumbing and field accessors. bitField0_ tracks presence of the four
// required fields (0x1 row, 0x2 service_name, 0x4 method_name, 0x8 request).
// getServiceName() lazily converts the internally-cached ByteString to String, caching
// the String form only when it is valid UTF-8 — the standard generated lazy-UTF-8 scheme.
this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceCall_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceCall_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder.class); } private int bitField0_; public static final int ROW_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString row_; /** * <code>required bytes row = 1;</code> */ public boolean hasRow() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes row = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getRow() { return row_; } public static final int SERVICE_NAME_FIELD_NUMBER = 2; private volatile java.lang.Object serviceName_; /** * <code>required string service_name = 2;</code> */ public boolean hasServiceName() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required string service_name = 2;</code> */ public java.lang.String getServiceName() { java.lang.Object ref = serviceName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { serviceName_ = s; } return s; } } /** * <code>required string service_name = 2;</code> */ public 
// NOTE(review): generated code — do not hand-edit. getServiceNameBytes() /
// getMethodNameBytes() lazily convert the cached String form to ByteString (the inverse
// of the String getters), caching the ByteString back into the volatile field.
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getServiceNameBytes() { java.lang.Object ref = serviceName_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); serviceName_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int METHOD_NAME_FIELD_NUMBER = 3; private volatile java.lang.Object methodName_; /** * <code>required string method_name = 3;</code> */ public boolean hasMethodName() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>required string method_name = 3;</code> */ public java.lang.String getMethodName() { java.lang.Object ref = methodName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { methodName_ = s; } return s; } } /** * <code>required string method_name = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getMethodNameBytes() { java.lang.Object ref = methodName_; if (ref instanceof java.lang.String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); methodName_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } public static final int REQUEST_FIELD_NUMBER = 4; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString request_; /** * <code>required bytes request = 4;</code> */ public boolean hasRequest() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>required bytes request = 4;</code> */ public 
// isInitialized() fails fast (memoized as 0) when any of the four required fields is
// absent. writeTo/getSerializedSize emit only fields whose presence bit is set, then
// append unknownFields; the computed size is memoized in memoizedSize.
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getRequest() { return request_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasRow()) { memoizedIsInitialized = 0; return false; } if (!hasServiceName()) { memoizedIsInitialized = 0; return false; } if (!hasMethodName()) { memoizedIsInitialized = 0; return false; } if (!hasRequest()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, row_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 2, serviceName_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 3, methodName_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeBytes(4, request_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(1, row_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(2, serviceName_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(3, methodName_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBytesSize(4, request_); } size += 
unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall) obj; boolean result = true; result = result && (hasRow() == other.hasRow()); if (hasRow()) { result = result && getRow() .equals(other.getRow()); } result = result && (hasServiceName() == other.hasServiceName()); if (hasServiceName()) { result = result && getServiceName() .equals(other.getServiceName()); } result = result && (hasMethodName() == other.hasMethodName()); if (hasMethodName()) { result = result && getMethodName() .equals(other.getMethodName()); } result = result && (hasRequest() == other.hasRequest()); if (hasRequest()) { result = result && getRequest() .equals(other.getRequest()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasRow()) { hash = (37 * hash) + ROW_FIELD_NUMBER; hash = (53 * hash) + getRow().hashCode(); } if (hasServiceName()) { hash = (37 * hash) + SERVICE_NAME_FIELD_NUMBER; hash = (53 * hash) + getServiceName().hashCode(); } if (hasMethodName()) { hash = (37 * hash) + METHOD_NAME_FIELD_NUMBER; hash = (53 * hash) + getMethodName().hashCode(); } if (hasRequest()) { hash = (37 * hash) + REQUEST_FIELD_NUMBER; hash = (53 * hash) + getRequest().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static 
// NOTE(review): generated parseFrom overloads for CoprocessorServiceCall — do not
// hand-edit; regenerate from Client.proto. All variants delegate to the singleton PARSER
// or to GeneratedMessageV3 IO helpers, differing only in input type and optional
// ExtensionRegistryLite.
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public 
// (continued) delimited and CodedInputStream parse variants, plus the newBuilder /
// toBuilder factory methods.
static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
// Builder for CoprocessorServiceCall: standard generated builder backed by the message's
// descriptor/field-accessor tables; clear() (continuing onto the next line) resets each
// field and its presence bit.
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.CoprocessorServiceCall} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.CoprocessorServiceCall) org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceCall_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceCall_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); row_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; 
// NOTE(review): generated Builder internals — do not hand-edit; regenerate from
// Client.proto. buildPartial() copies the builder's four fields into the new message and
// transfers the corresponding presence bits (0x1 row, 0x2 service_name, 0x4 method_name,
// 0x8 request) from the builder's bitField0_ to the message's.
bitField0_ = (bitField0_ & ~0x00000001); serviceName_ = ""; bitField0_ = (bitField0_ & ~0x00000002); methodName_ = ""; bitField0_ = (bitField0_ & ~0x00000004); request_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000008); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceCall_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.row_ = row_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.serviceName_ = serviceName_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.methodName_ = methodName_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; } result.request_ = request_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return 
// Generic reflective mutators delegate to GeneratedMessageV3.Builder; the typed
// mergeFrom(CoprocessorServiceCall) copies only fields that are set on `other` (string
// fields are shared by reference, which is safe because the message is immutable).
(Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance()) return this; if (other.hasRow()) { setRow(other.getRow()); } if (other.hasServiceName()) { bitField0_ |= 0x00000002; serviceName_ = other.serviceName_; onChanged(); } if (other.hasMethodName()) { bitField0_ |= 0x00000004; methodName_ = other.methodName_; onChanged(); } if (other.hasRequest()) { setRequest(other.getRequest()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasRow()) { return 
// Builder.isInitialized() requires all four required fields; the stream mergeFrom keeps
// any partially-parsed message before rethrowing. Per-field accessors/mutators follow:
// setters null-check, set the presence bit, and fire onChanged(); clearXxx() drops the
// bit and restores the default-instance value.
false; } if (!hasServiceName()) { return false; } if (!hasMethodName()) { return false; } if (!hasRequest()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString row_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes row = 1;</code> */ public boolean hasRow() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes row = 1;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getRow() { return row_; } /** * <code>required bytes row = 1;</code> */ public Builder setRow(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; row_ = value; onChanged(); return this; } /** * <code>required bytes row = 1;</code> */ public Builder clearRow() { bitField0_ = (bitField0_ & ~0x00000001); row_ = getDefaultInstance().getRow(); onChanged(); return this; } private java.lang.Object serviceName_ = ""; /** * <code>required string service_name = 2;</code> */ public boolean hasServiceName() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required string service_name = 
2;</code> */ public java.lang.String getServiceName() { java.lang.Object ref = serviceName_; if (!(ref instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { serviceName_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>required string service_name = 2;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getServiceNameBytes() { java.lang.Object ref = serviceName_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); serviceName_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <code>required string service_name = 2;</code> */ public Builder setServiceName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; serviceName_ = value; onChanged(); return this; } /** * <code>required string service_name = 2;</code> */ public Builder clearServiceName() { bitField0_ = (bitField0_ & ~0x00000002); serviceName_ = getDefaultInstance().getServiceName(); onChanged(); return this; } /** * <code>required string service_name = 2;</code> */ public Builder setServiceNameBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; serviceName_ = value; onChanged(); return this; } private java.lang.Object methodName_ = ""; /** * <code>required string method_name = 3;</code> */ public boolean hasMethodName() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>required string method_name = 3;</code> */ public java.lang.String getMethodName() { java.lang.Object ref = methodName_; if (!(ref 
instanceof java.lang.String)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { methodName_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>required string method_name = 3;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getMethodNameBytes() { java.lang.Object ref = methodName_; if (ref instanceof String) { org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); methodName_ = b; return b; } else { return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; } } /** * <code>required string method_name = 3;</code> */ public Builder setMethodName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; methodName_ = value; onChanged(); return this; } /** * <code>required string method_name = 3;</code> */ public Builder clearMethodName() { bitField0_ = (bitField0_ & ~0x00000004); methodName_ = getDefaultInstance().getMethodName(); onChanged(); return this; } /** * <code>required string method_name = 3;</code> */ public Builder setMethodNameBytes( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; methodName_ = value; onChanged(); return this; } private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString request_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes request = 4;</code> */ public boolean hasRequest() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>required bytes request = 4;</code> */ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getRequest() { return request_; } /** * 
<code>required bytes request = 4;</code> */ public Builder setRequest(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000008; request_ = value; onChanged(); return this; } /** * <code>required bytes request = 4;</code> */ public Builder clearRequest() { bitField0_ = (bitField0_ & ~0x00000008); request_ = getDefaultInstance().getRequest(); onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.CoprocessorServiceCall) } // @@protoc_insertion_point(class_scope:hbase.pb.CoprocessorServiceCall) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<CoprocessorServiceCall> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<CoprocessorServiceCall>() { public CoprocessorServiceCall parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new CoprocessorServiceCall(input, extensionRegistry); } }; public static 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<CoprocessorServiceCall> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<CoprocessorServiceCall> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface CoprocessorServiceResultOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.CoprocessorServiceResult) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>optional .hbase.pb.NameBytesPair value = 1;</code> */ boolean hasValue(); /** * <code>optional .hbase.pb.NameBytesPair value = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair getValue(); /** * <code>optional .hbase.pb.NameBytesPair value = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder(); } /** * Protobuf type {@code hbase.pb.CoprocessorServiceResult} */ public static final class CoprocessorServiceResult extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.CoprocessorServiceResult) CoprocessorServiceResultOrBuilder { // Use CoprocessorServiceResult.newBuilder() to construct. 
private CoprocessorServiceResult(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); }
private CoprocessorServiceResult() { }
@java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; }
// Wire-format parsing constructor: reads tag/value pairs until EOF (tag 0).
// Unrecognized fields are preserved in unknownFields; field 1 (tag 10) is the
// optional NameBytesPair 'value', merged into any previously-read value.
private CoprocessorServiceResult( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = value_.toBuilder(); } value_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(value_); value_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } }
public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceResult_descriptor; }
protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceResult_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder.class); }
// Has-bit for the single optional field: 0x1 = 'value' is present.
private int bitField0_;
public static final int VALUE_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair value_;
/** * <code>optional .hbase.pb.NameBytesPair value = 1;</code> */ public boolean hasValue() { return ((bitField0_ & 0x00000001) == 0x00000001); }
/** * <code>optional .hbase.pb.NameBytesPair value = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair getValue() { return value_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance() : value_; }
/** * <code>optional .hbase.pb.NameBytesPair value = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() { return value_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance() : value_; }
// Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized. 'value'
// is optional, but when present it must itself be initialized.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (hasValue()) { if (!getValue().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; }
public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, getValue()); } unknownFields.writeTo(output); }
// Serialized size is computed once and memoized in memoizedSize (-1 = unset).
public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(1, getValue()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; }
private static final long serialVersionUID = 0L;
// Value equality over the presence bit, the 'value' submessage, and unknown
// fields; hashCode below is consistent with this and memoized.
@java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult) obj; boolean result = true; result = result && (hasValue() == other.hasValue()); if (hasValue()) { result = result && getValue() .equals(other.getValue()); } result = result && unknownFields.equals(other.unknownFields); return result; }
@java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasValue()) { hash = (37 * hash) + VALUE_FIELD_NUMBER; hash = (53 * hash) + getValue().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; }
// Static parse entry points for every supported input form (ByteString,
// byte[], InputStream, CodedInputStream), each with an optional registry.
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); }
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); }
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); }
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); }
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); }
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); }
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); }
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); }
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); }
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); }
public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); }
@java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; }
/** * Protobuf type {@code hbase.pb.CoprocessorServiceResult} */
public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hbase.pb.CoprocessorServiceResult)
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder {
public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceResult_descriptor; }
protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceResult_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder.class); }
// Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult.newBuilder()
private Builder() { maybeForceBuilderInitialization(); }
private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); }
// Eagerly creates the nested-field builder when the runtime requires field
// builders (i.e. when this builder has a parent to notify of changes).
private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getValueFieldBuilder(); } }
public Builder clear() { super.clear(); if (valueBuilder_ == null) { value_ = null; } else { valueBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; }
public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceResult_descriptor; }
public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance(); }
// build() enforces initialization; buildPartial() does not.
public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; }
public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (valueBuilder_ == null) { result.value_ = value_; } else { result.value_ = valueBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; }
public Builder clone() { return (Builder) super.clone(); }
// Covariant reflective-mutation overrides (see CoprocessorServiceCall.Builder).
public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); }
public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); }
public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); }
public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); }
public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); }
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult)other); } else { super.mergeFrom(other); return this; } }
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance()) return this; if (other.hasValue()) { mergeValue(other.getValue()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; }
public final boolean isInitialized() { if (hasValue()) { if (!getValue().isInitialized()) { return false; } } return true; }
public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; }
private int bitField0_;
// 'value' is held either directly (value_) or via the single-field builder
// (valueBuilder_); exactly one representation is active at a time.
private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair value_ = null;
private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> valueBuilder_;
/** * <code>optional .hbase.pb.NameBytesPair value = 1;</code> */ public boolean hasValue() { return ((bitField0_ & 0x00000001) == 0x00000001); }
/** * <code>optional .hbase.pb.NameBytesPair value = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair getValue() { if (valueBuilder_ == null) { return value_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance() : value_; } else { return valueBuilder_.getMessage(); } }
/** * <code>optional .hbase.pb.NameBytesPair value = 1;</code> */ public Builder setValue(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair value) { if (valueBuilder_ == null) { if (value == null) { throw new NullPointerException(); } value_ = value; onChanged(); } else { valueBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; }
/** * <code>optional .hbase.pb.NameBytesPair value = 1;</code> */ public Builder setValue( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { if (valueBuilder_ == null) { value_ = builderForValue.build(); onChanged(); } else { valueBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; }
/** * <code>optional .hbase.pb.NameBytesPair value = 1;</code> */ public Builder mergeValue(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair value) { if (valueBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && value_ != null && value_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) { value_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(value_).mergeFrom(value).buildPartial(); } else { value_ = value; } onChanged(); } else { valueBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; }
/** * <code>optional .hbase.pb.NameBytesPair value = 1;</code> */ public Builder clearValue() { if (valueBuilder_ == null) { value_ = null; onChanged(); } else { valueBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; }
/** * <code>optional .hbase.pb.NameBytesPair value = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder getValueBuilder() { bitField0_ |= 0x00000001;
DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult(); }
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult getDefaultInstance() { return DEFAULT_INSTANCE; }
// Deprecated as a public field by the generator; callers should use parser().
@java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<CoprocessorServiceResult> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<CoprocessorServiceResult>() { public CoprocessorServiceResult parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new CoprocessorServiceResult(input, extensionRegistry); } };
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<CoprocessorServiceResult> parser() { return PARSER; }
@java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<CoprocessorServiceResult> getParserForType() { return PARSER; }
public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult getDefaultInstanceForType() { return DEFAULT_INSTANCE; }
}
// Read-only accessor interface for hbase.pb.CoprocessorServiceRequest, shared
// by the message class and its Builder.
public interface CoprocessorServiceRequestOrBuilder extends
// @@protoc_insertion_point(interface_extends:hbase.pb.CoprocessorServiceRequest)
org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
/** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ boolean hasRegion();
/** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
/** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
/** * <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code> */ boolean hasCall();
/** * <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall getCall();
/** * <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getCallOrBuilder();
}
/** * Protobuf type {@code hbase.pb.CoprocessorServiceRequest} */
public static final class CoprocessorServiceRequest extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hbase.pb.CoprocessorServiceRequest)
CoprocessorServiceRequestOrBuilder {
// Use CoprocessorServiceRequest.newBuilder() to construct.
private CoprocessorServiceRequest(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); }
private CoprocessorServiceRequest() { }
@java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; }
// Wire-format parsing constructor: field 1 (tag 10) is the RegionSpecifier
// 'region', field 2 (tag 18) is the CoprocessorServiceCall 'call'; repeated
// occurrences of either are merged; unknown fields are preserved.
private CoprocessorServiceRequest( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = region_.toBuilder(); } region_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(region_); region_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 18: { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder subBuilder = null; if (((bitField0_ & 0x00000002) == 0x00000002)) { subBuilder = call_.toBuilder(); } call_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(call_); call_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } }
public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceRequest_descriptor; }
protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest.Builder.class); }
// Has-bits: 0x1 = 'region' present, 0x2 = 'call' present.
private int bitField0_;
public static final int REGION_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_;
/** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); }
/** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; }
/** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; }
public static final int CALL_FIELD_NUMBER = 2;
private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall call_;
/** * <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code> */ public boolean hasCall() { return ((bitField0_ & 0x00000002) == 0x00000002); }
/** * <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall getCall() { return call_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance() : call_; }
/** * <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getCallOrBuilder() { return call_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance() : call_; }
// Both fields are 'required' and each is a message that must itself be
// initialized; result is memoized in memoizedIsInitialized.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasRegion()) { memoizedIsInitialized = 0; return false; } if (!hasCall()) { memoizedIsInitialized = 0; return false; } if (!getRegion().isInitialized()) { memoizedIsInitialized = 0; return false; } if (!getCall().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; }
public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, getRegion()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeMessage(2, getCall()); } unknownFields.writeTo(output); }
public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(1, getRegion()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(2, getCall()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; }
private static final long serialVersionUID = 0L;
@java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest) obj; boolean result = true; result =
result && (hasRegion() == other.hasRegion()); if (hasRegion()) { result = result && getRegion() .equals(other.getRegion()); } result = result && (hasCall() == other.hasCall()); if (hasCall()) { result = result && getCall() .equals(other.getCall()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasRegion()) { hash = (37 * hash) + REGION_FIELD_NUMBER; hash = (53 * hash) + getRegion().hashCode(); } if (hasCall()) { hash = (37 * hash) + CALL_FIELD_NUMBER; hash = (53 * hash) + getCall().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom( 
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.CoprocessorServiceRequest} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.CoprocessorServiceRequest) org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequestOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceRequest_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getRegionFieldBuilder(); getCallFieldBuilder(); } } public Builder clear() { super.clear(); if (regionBuilder_ == null) { region_ = null; } else { regionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (callBuilder_ == null) { call_ = null; } else { callBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceRequest_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest buildPartial() { 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (regionBuilder_ == null) { result.region_ = region_; } else { result.region_ = regionBuilder_.build(); } if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } if (callBuilder_ == null) { result.call_ = call_; } else { result.call_ = callBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest)other); } else { 
super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance()) return this; if (other.hasRegion()) { mergeRegion(other.getRegion()); } if (other.hasCall()) { mergeCall(other.getCall()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (!hasRegion()) { return false; } if (!hasCall()) { return false; } if (!getRegion().isInitialized()) { return false; } if (!getCall().isInitialized()) { return false; } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; /** * <code>required .hbase.pb.RegionSpecifier region = 
1;</code> */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } else { return regionBuilder_.getMessage(); } } /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ public Builder setRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } region_ = value; onChanged(); } else { regionBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ public Builder setRegion( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { if (regionBuilder_ == null) { region_ = builderForValue.build(); onChanged(); } else { regionBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ public Builder mergeRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && region_ != null && region_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); } else { region_ = value; } onChanged(); } else { regionBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ public Builder 
clearRegion() { if (regionBuilder_ == null) { region_ = null; onChanged(); } else { regionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { bitField0_ |= 0x00000001; onChanged(); return getRegionFieldBuilder().getBuilder(); } /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); } else { return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } } /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { if (regionBuilder_ == null) { regionBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( getRegion(), getParentForChildren(), isClean()); region_ = null; } return regionBuilder_; } private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall call_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder> callBuilder_; /** * <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code> */ public boolean hasCall() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall getCall() { if (callBuilder_ == null) { return call_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance() : call_; } else { return callBuilder_.getMessage(); } } /** * <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code> */ public Builder setCall(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall value) { if (callBuilder_ == null) { if (value == null) { throw new NullPointerException(); } call_ = value; onChanged(); } else { callBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } /** * <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code> */ public Builder setCall( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder builderForValue) { if (callBuilder_ == null) { call_ = builderForValue.build(); onChanged(); } else { callBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } /** * <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code> */ public Builder mergeCall(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall value) { if (callBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && call_ != null && call_ != 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance()) { call_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.newBuilder(call_).mergeFrom(value).buildPartial(); } else { call_ = value; } onChanged(); } else { callBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } /** * <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code> */ public Builder clearCall() { if (callBuilder_ == null) { call_ = null; onChanged(); } else { callBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } /** * <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder getCallBuilder() { bitField0_ |= 0x00000002; onChanged(); return getCallFieldBuilder().getBuilder(); } /** * <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getCallOrBuilder() { if (callBuilder_ != null) { return callBuilder_.getMessageOrBuilder(); } else { return call_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance() : call_; } } /** * <code>required .hbase.pb.CoprocessorServiceCall call = 2;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder> getCallFieldBuilder() { if (callBuilder_ == null) { callBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder>( getCall(), getParentForChildren(), isClean()); call_ = null; } return callBuilder_; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.CoprocessorServiceRequest) } // @@protoc_insertion_point(class_scope:hbase.pb.CoprocessorServiceRequest) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest getDefaultInstance() { return DEFAULT_INSTANCE; } 
@java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<CoprocessorServiceRequest> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<CoprocessorServiceRequest>() { public CoprocessorServiceRequest parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new CoprocessorServiceRequest(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<CoprocessorServiceRequest> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<CoprocessorServiceRequest> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface CoprocessorServiceResponseOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.CoprocessorServiceResponse) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ boolean hasRegion(); /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); /** * <code>required .hbase.pb.NameBytesPair value = 2;</code> */ boolean hasValue(); /** * <code>required .hbase.pb.NameBytesPair value = 2;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair getValue(); /** * <code>required .hbase.pb.NameBytesPair value = 2;</code> */ 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder(); } /** * Protobuf type {@code hbase.pb.CoprocessorServiceResponse} */ public static final class CoprocessorServiceResponse extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.CoprocessorServiceResponse) CoprocessorServiceResponseOrBuilder { // Use CoprocessorServiceResponse.newBuilder() to construct. private CoprocessorServiceResponse(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CoprocessorServiceResponse() { } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private CoprocessorServiceResponse( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = region_.toBuilder(); } region_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(region_); region_ = subBuilder.buildPartial(); } 
bitField0_ |= 0x00000001; break; } case 18: { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = null; if (((bitField0_ & 0x00000002) == 0x00000002)) { subBuilder = value_.toBuilder(); } value_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(value_); value_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceResponse_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse.Builder.class); } private int bitField0_; public static final int REGION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_; /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.RegionSpecifier 
region = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } public static final int VALUE_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair value_; /** * <code>required .hbase.pb.NameBytesPair value = 2;</code> */ public boolean hasValue() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required .hbase.pb.NameBytesPair value = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair getValue() { return value_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance() : value_; } /** * <code>required .hbase.pb.NameBytesPair value = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() { return value_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance() : value_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasRegion()) { memoizedIsInitialized = 0; return false; } if (!hasValue()) { memoizedIsInitialized = 0; return false; } if (!getRegion().isInitialized()) { memoizedIsInitialized = 0; return false; } if (!getValue().isInitialized()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, getRegion()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeMessage(2, getValue()); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(1, getRegion()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(2, getValue()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse) obj; boolean result = true; result = 
result && (hasRegion() == other.hasRegion()); if (hasRegion()) { result = result && getRegion() .equals(other.getRegion()); } result = result && (hasValue() == other.hasValue()); if (hasValue()) { result = result && getValue() .equals(other.getValue()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasRegion()) { hash = (37 * hash) + REGION_FIELD_NUMBER; hash = (53 * hash) + getRegion().hashCode(); } if (hasValue()) { hash = (37 * hash) + VALUE_FIELD_NUMBER; hash = (53 * hash) + getValue().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) 
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse 
parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.CoprocessorServiceResponse} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.CoprocessorServiceResponse) org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceResponse_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getRegionFieldBuilder(); getValueFieldBuilder(); } } public Builder clear() { super.clear(); if (regionBuilder_ == null) { region_ = null; } else { regionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (valueBuilder_ == null) { value_ = null; } else { valueBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_CoprocessorServiceResponse_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse buildPartial() { 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (regionBuilder_ == null) { result.region_ = region_; } else { result.region_ = regionBuilder_.build(); } if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } if (valueBuilder_ == null) { result.value_ = value_; } else { result.value_ = valueBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse)other); } else { 
// (continuation) fall-through branch of mergeFrom(Message): delegate to the
// generic GeneratedMessageV3.Builder implementation for foreign message types.
super.mergeFrom(other);
          return this;
        }
      }

      /**
       * Type-specific merge: copies the region and value fields from {@code other}
       * into this builder. Presence-aware — fields absent in {@code other} are
       * left untouched; unknown fields are merged as well.
       */
      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse other) {
        if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance()) return this;
        if (other.hasRegion()) {
          mergeRegion(other.getRegion());
        }
        if (other.hasValue()) {
          mergeValue(other.getValue());
        }
        this.mergeUnknownFields(other.unknownFields);
        onChanged();
        return this;
      }

      // Both fields are `required` message fields (see the <code>required</code>
      // javadoc tags on their accessors), so initialization demands presence of
      // each AND recursive initialization of the nested messages.
      public final boolean isInitialized() {
        if (!hasRegion()) {
          return false;
        }
        if (!hasValue()) {
          return false;
        }
        if (!getRegion().isInitialized()) {
          return false;
        }
        if (!getValue().isInitialized()) {
          return false;
        }
        return true;
      }

      /**
       * Stream merge: parses one whole message from {@code input} via PARSER and
       * merges it into this builder. If parsing fails part-way, the partially
       * read message (from e.getUnfinishedMessage()) is still merged in the
       * finally block before the failure is rethrown as an IOException.
       */
      public Builder mergeFrom(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
          org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse) e.getUnfinishedMessage();
          throw e.unwrapIOException();
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }

      // Presence bits for this builder: bit 0 = region, bit 1 = value.
      private int bitField0_;

      // Backing storage for `region` (field 1). When regionBuilder_ is non-null
      // it takes precedence over region_ — see the accessors that follow.
      private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = null;
      private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
      /**
       * <code>required .hbase.pb.RegionSpecifier
region = 1;</code> */ public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { if (regionBuilder_ == null) { return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } else { return regionBuilder_.getMessage(); } } /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ public Builder setRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } region_ = value; onChanged(); } else { regionBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ public Builder setRegion( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { if (regionBuilder_ == null) { region_ = builderForValue.build(); onChanged(); } else { regionBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ public Builder mergeRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && region_ != null && region_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { region_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); } else { region_ = value; } onChanged(); } else { regionBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ public Builder 
clearRegion() { if (regionBuilder_ == null) { region_ = null; onChanged(); } else { regionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { bitField0_ |= 0x00000001; onChanged(); return getRegionFieldBuilder().getBuilder(); } /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); } else { return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_; } } /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { if (regionBuilder_ == null) { regionBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( getRegion(), getParentForChildren(), isClean()); region_ = null; } return regionBuilder_; } private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair value_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> valueBuilder_; /** * <code>required .hbase.pb.NameBytesPair value = 2;</code> */ public boolean hasValue() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required .hbase.pb.NameBytesPair value = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair getValue() { if (valueBuilder_ == null) { return value_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance() : value_; } else { return valueBuilder_.getMessage(); } } /** * <code>required .hbase.pb.NameBytesPair value = 2;</code> */ public Builder setValue(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair value) { if (valueBuilder_ == null) { if (value == null) { throw new NullPointerException(); } value_ = value; onChanged(); } else { valueBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } /** * <code>required .hbase.pb.NameBytesPair value = 2;</code> */ public Builder setValue( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { if (valueBuilder_ == null) { value_ = builderForValue.build(); onChanged(); } else { valueBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } /** * <code>required .hbase.pb.NameBytesPair value = 2;</code> */ public Builder mergeValue(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair value) { if (valueBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && value_ != null && value_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) { value_ = 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(value_).mergeFrom(value).buildPartial(); } else { value_ = value; } onChanged(); } else { valueBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } /** * <code>required .hbase.pb.NameBytesPair value = 2;</code> */ public Builder clearValue() { if (valueBuilder_ == null) { value_ = null; onChanged(); } else { valueBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } /** * <code>required .hbase.pb.NameBytesPair value = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder getValueBuilder() { bitField0_ |= 0x00000002; onChanged(); return getValueFieldBuilder().getBuilder(); } /** * <code>required .hbase.pb.NameBytesPair value = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() { if (valueBuilder_ != null) { return valueBuilder_.getMessageOrBuilder(); } else { return value_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance() : value_; } } /** * <code>required .hbase.pb.NameBytesPair value = 2;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getValueFieldBuilder() { if (valueBuilder_ == null) { valueBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( getValue(), getParentForChildren(), isClean()); value_ = null; } return valueBuilder_; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.CoprocessorServiceResponse) } // @@protoc_insertion_point(class_scope:hbase.pb.CoprocessorServiceResponse) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final 
// (continues the preceding "@java.lang.Deprecated public static final" modifiers)
// Shared parser singleton; deprecated in favor of parser()/getParserForType().
org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<CoprocessorServiceResponse>
    PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<CoprocessorServiceResponse>() {
  public CoprocessorServiceResponse parsePartialFrom(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    // Parsing is delegated to the message's parsing constructor.
    return new CoprocessorServiceResponse(input, extensionRegistry);
  }
};

// Preferred accessors for the parser singleton.
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<CoprocessorServiceResponse> parser() {
  return PARSER;
}

@java.lang.Override
public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<CoprocessorServiceResponse> getParserForType() {
  return PARSER;
}

public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}

}
// end of class CoprocessorServiceResponse

/**
 * Read-only view of an {@code hbase.pb.Action}; implemented by both the
 * Action message and its Builder.
 */
public interface ActionOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hbase.pb.Action)
    org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {

  /**
   * <pre>
   * If part of a multi action, useful aligning
   * result with what was originally submitted.
   * </pre>
   *
   * <code>optional uint32 index = 1;</code>
   */
  boolean hasIndex();
  /**
   * <pre>
   * If part of a multi action, useful aligning
   * result with what was originally submitted.
   * </pre>
   *
   * <code>optional uint32 index = 1;</code>
   */
  int getIndex();

  /**
   * <code>optional .hbase.pb.MutationProto mutation = 2;</code>
   */
  boolean hasMutation();
  /**
   * <code>optional .hbase.pb.MutationProto mutation = 2;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto getMutation();
  /**
   * <code>optional .hbase.pb.MutationProto mutation = 2;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder();

  /**
   * <code>optional .hbase.pb.Get get = 3;</code>
   */
  boolean hasGet();
  /**
   * <code>optional .hbase.pb.Get get = 3;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get getGet();
  /**
   * <code>optional .hbase.pb.Get get = 3;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder();

  /**
   * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code>
   */
  boolean hasServiceCall();
  /**
   * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall getServiceCall();
  /**
   * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code>
   */
  org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getServiceCallOrBuilder();
}

/**
 * <pre>
 * Either a Get or a Mutation
 * </pre>
 *
 * Protobuf type {@code hbase.pb.Action}
 */
public static final class Action extends
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:hbase.pb.Action)
    ActionOrBuilder {
  // Use Action.newBuilder() to construct.
// Builder-based constructor; field state is copied in by GeneratedMessageV3.
private Action(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// Default-instance constructor: only the scalar field needs an explicit default.
private Action() {
  index_ = 0;
}

@java.lang.Override
public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
  return this.unknownFields;
}

/**
 * Parsing constructor: reads tag/value pairs from {@code input} until
 * end-of-stream (tag 0), setting the matching presence bit in bitField0_ for
 * each recognized field. Unrecognized tags are preserved in unknownFields.
 */
private Action(
    org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
  this();
  // Declared by the code generator but never read in this constructor
  // (no repeated fields in this message).
  int mutable_bitField0_ = 0;
  org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      // NOTE: `default` precedes the field cases textually; switch dispatch is
      // by value, so the ordering has no effect on behavior.
      switch (tag) {
        case 0:
          done = true;
          break;
        default: {
          if (!parseUnknownField(input, unknownFields,
                                 extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
        case 8: {  // field 1 (varint): index
          bitField0_ |= 0x00000001;
          index_ = input.readUInt32();
          break;
        }
        case 18: {  // field 2 (length-delimited): mutation
          // If the field was already seen, merge into the existing value
          // rather than overwriting (protobuf repeated-occurrence semantics).
          org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Builder subBuilder = null;
          if (((bitField0_ & 0x00000002) == 0x00000002)) {
            subBuilder = mutation_.toBuilder();
          }
          mutation_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.PARSER, extensionRegistry);
          if (subBuilder != null) {
            subBuilder.mergeFrom(mutation_);
            mutation_ = subBuilder.buildPartial();
          }
          bitField0_ |= 0x00000002;
          break;
        }
        case 26: {  // field 3 (length-delimited): get
          org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.Builder subBuilder = null;
          if (((bitField0_ & 0x00000004) == 0x00000004)) {
            subBuilder = get_.toBuilder();
          }
          get_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.PARSER, extensionRegistry);
          if (subBuilder != null) {
            subBuilder.mergeFrom(get_);
            get_ = subBuilder.buildPartial();
          }
          bitField0_ |= 0x00000004;
          break;
        }
        case 34: {  // field 4 (length-delimited): service_call
          org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder subBuilder = null;
          if (((bitField0_ & 0x00000008) == 0x00000008)) {
            subBuilder = serviceCall_.toBuilder();
          }
          serviceCall_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.PARSER, extensionRegistry);
          if (subBuilder != null) {
            subBuilder.mergeFrom(serviceCall_);
            serviceCall_ = subBuilder.buildPartial();
          }
          bitField0_ |= 0x00000008;
          break;
        }
      }
    }
  } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    // Wrap plain IO failures so callers see a single parse-exception type.
    throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
        e).setUnfinishedMessage(this);
  } finally {
    // Always publish collected unknown fields, even when parsing failed.
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}

public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Action_descriptor;
}

protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Action_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action.Builder.class);
}

// Presence bits: bit 0 = index, bit 1 = mutation, bit 2 = get, bit 3 = service_call.
private int bitField0_;
public static final int INDEX_FIELD_NUMBER = 1;
private int index_;
/**
 * <pre>
 * If part of a multi action, useful aligning
 * result with what was originally submitted.
 * </pre>
 *
 * <code>optional uint32 index = 1;</code>
 */
public boolean hasIndex() {
  return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <pre>
 * If part of a multi action, useful aligning
 * result with what was originally submitted.
* </pre> * * <code>optional uint32 index = 1;</code> */ public int getIndex() { return index_; } public static final int MUTATION_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto mutation_; /** * <code>optional .hbase.pb.MutationProto mutation = 2;</code> */ public boolean hasMutation() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional .hbase.pb.MutationProto mutation = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto getMutation() { return mutation_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance() : mutation_; } /** * <code>optional .hbase.pb.MutationProto mutation = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder() { return mutation_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance() : mutation_; } public static final int GET_FIELD_NUMBER = 3; private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get get_; /** * <code>optional .hbase.pb.Get get = 3;</code> */ public boolean hasGet() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional .hbase.pb.Get get = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get getGet() { return get_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.getDefaultInstance() : get_; } /** * <code>optional .hbase.pb.Get get = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() { return get_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.getDefaultInstance() : get_; } public static final int SERVICE_CALL_FIELD_NUMBER = 4; private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall serviceCall_; /** * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code> */ public boolean hasServiceCall() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall getServiceCall() { return serviceCall_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance() : serviceCall_; } /** * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getServiceCallOrBuilder() { return serviceCall_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance() : serviceCall_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (hasMutation()) { if (!getMutation().isInitialized()) { memoizedIsInitialized = 0; return false; } } if (hasGet()) { if (!getGet().isInitialized()) { memoizedIsInitialized = 0; return false; } } if (hasServiceCall()) { if (!getServiceCall().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt32(1, index_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeMessage(2, getMutation()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeMessage(3, getGet()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeMessage(4, getServiceCall()); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt32Size(1, index_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(2, getMutation()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(3, getGet()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(4, getServiceCall()); } size += unknownFields.getSerializedSize(); memoizedSize = 
size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action) obj; boolean result = true; result = result && (hasIndex() == other.hasIndex()); if (hasIndex()) { result = result && (getIndex() == other.getIndex()); } result = result && (hasMutation() == other.hasMutation()); if (hasMutation()) { result = result && getMutation() .equals(other.getMutation()); } result = result && (hasGet() == other.hasGet()); if (hasGet()) { result = result && getGet() .equals(other.getGet()); } result = result && (hasServiceCall() == other.hasServiceCall()); if (hasServiceCall()) { result = result && getServiceCall() .equals(other.getServiceCall()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasIndex()) { hash = (37 * hash) + INDEX_FIELD_NUMBER; hash = (53 * hash) + getIndex(); } if (hasMutation()) { hash = (37 * hash) + MUTATION_FIELD_NUMBER; hash = (53 * hash) + getMutation().hashCode(); } if (hasGet()) { hash = (37 * hash) + GET_FIELD_NUMBER; hash = (53 * hash) + getGet().hashCode(); } if (hasServiceCall()) { hash = (37 * hash) + SERVICE_CALL_FIELD_NUMBER; hash = (53 * hash) + getServiceCall().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws 
org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 
.parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Either a Get or a Mutation * </pre> * * Protobuf type {@code hbase.pb.Action} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.Action) org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ActionOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Action_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Action_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getMutationFieldBuilder(); getGetFieldBuilder(); getServiceCallFieldBuilder(); } } public Builder clear() { super.clear(); index_ = 0; bitField0_ = (bitField0_ & ~0x00000001); if (mutationBuilder_ == null) { 
mutation_ = null; } else { mutationBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); if (getBuilder_ == null) { get_ = null; } else { getBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000004); if (serviceCallBuilder_ == null) { serviceCall_ = null; } else { serviceCallBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000008); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_Action_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.index_ = index_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } if (mutationBuilder_ == null) { result.mutation_ = mutation_; } else { result.mutation_ = mutationBuilder_.build(); } if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } if (getBuilder_ == null) { result.get_ = get_; } else { result.get_ = getBuilder_.build(); } if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; } if (serviceCallBuilder_ == null) { 
result.serviceCall_ = serviceCall_; } else { result.serviceCall_ = serviceCallBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action.getDefaultInstance()) return this; if (other.hasIndex()) { setIndex(other.getIndex()); } if (other.hasMutation()) { mergeMutation(other.getMutation()); } if (other.hasGet()) { mergeGet(other.getGet()); } if (other.hasServiceCall()) { mergeServiceCall(other.getServiceCall()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public 
final boolean isInitialized() { if (hasMutation()) { if (!getMutation().isInitialized()) { return false; } } if (hasGet()) { if (!getGet().isInitialized()) { return false; } } if (hasServiceCall()) { if (!getServiceCall().isInitialized()) { return false; } } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private int index_ ; /** * <pre> * If part of a multi action, useful aligning * result with what was originally submitted. * </pre> * * <code>optional uint32 index = 1;</code> */ public boolean hasIndex() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * If part of a multi action, useful aligning * result with what was originally submitted. * </pre> * * <code>optional uint32 index = 1;</code> */ public int getIndex() { return index_; } /** * <pre> * If part of a multi action, useful aligning * result with what was originally submitted. * </pre> * * <code>optional uint32 index = 1;</code> */ public Builder setIndex(int value) { bitField0_ |= 0x00000001; index_ = value; onChanged(); return this; } /** * <pre> * If part of a multi action, useful aligning * result with what was originally submitted. 
* </pre> * * <code>optional uint32 index = 1;</code> */ public Builder clearIndex() { bitField0_ = (bitField0_ & ~0x00000001); index_ = 0; onChanged(); return this; } private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto mutation_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProtoOrBuilder> mutationBuilder_; /** * <code>optional .hbase.pb.MutationProto mutation = 2;</code> */ public boolean hasMutation() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional .hbase.pb.MutationProto mutation = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto getMutation() { if (mutationBuilder_ == null) { return mutation_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance() : mutation_; } else { return mutationBuilder_.getMessage(); } } /** * <code>optional .hbase.pb.MutationProto mutation = 2;</code> */ public Builder setMutation(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto value) { if (mutationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } mutation_ = value; onChanged(); } else { mutationBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .hbase.pb.MutationProto mutation = 2;</code> */ public Builder setMutation( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Builder builderForValue) { if (mutationBuilder_ == null) { mutation_ = builderForValue.build(); onChanged(); } else { mutationBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .hbase.pb.MutationProto mutation = 
2;</code> */ public Builder mergeMutation(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto value) { if (mutationBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && mutation_ != null && mutation_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance()) { mutation_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.newBuilder(mutation_).mergeFrom(value).buildPartial(); } else { mutation_ = value; } onChanged(); } else { mutationBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .hbase.pb.MutationProto mutation = 2;</code> */ public Builder clearMutation() { if (mutationBuilder_ == null) { mutation_ = null; onChanged(); } else { mutationBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } /** * <code>optional .hbase.pb.MutationProto mutation = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Builder getMutationBuilder() { bitField0_ |= 0x00000002; onChanged(); return getMutationFieldBuilder().getBuilder(); } /** * <code>optional .hbase.pb.MutationProto mutation = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder() { if (mutationBuilder_ != null) { return mutationBuilder_.getMessageOrBuilder(); } else { return mutation_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance() : mutation_; } } /** * <code>optional .hbase.pb.MutationProto mutation = 2;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProtoOrBuilder> getMutationFieldBuilder() { if (mutationBuilder_ == null) { mutationBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProtoOrBuilder>( getMutation(), getParentForChildren(), isClean()); mutation_ = null; } return mutationBuilder_; } private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get get_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetOrBuilder> getBuilder_; /** * <code>optional .hbase.pb.Get get = 3;</code> */ public boolean hasGet() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional .hbase.pb.Get get = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get getGet() { if (getBuilder_ == null) { return get_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.getDefaultInstance() : get_; } else { return getBuilder_.getMessage(); } } /** * <code>optional .hbase.pb.Get get = 3;</code> */ public Builder setGet(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get value) { if (getBuilder_ == null) { if (value == null) { throw new NullPointerException(); } get_ = value; onChanged(); } else { getBuilder_.setMessage(value); } bitField0_ |= 0x00000004; return this; } /** * <code>optional .hbase.pb.Get get = 3;</code> */ public Builder setGet( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.Builder builderForValue) { if (getBuilder_ == null) { get_ = builderForValue.build(); onChanged(); } else { getBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; return this; } /** * <code>optional .hbase.pb.Get get = 3;</code> */ public Builder mergeGet(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get value) { if (getBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004) && get_ != null && get_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.getDefaultInstance()) { get_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.newBuilder(get_).mergeFrom(value).buildPartial(); } else { get_ = value; } onChanged(); } else { getBuilder_.mergeFrom(value); } bitField0_ |= 0x00000004; return this; } /** * <code>optional .hbase.pb.Get get = 3;</code> */ public Builder clearGet() { if (getBuilder_ == null) { get_ = null; onChanged(); } else { getBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000004); return this; } /** * <code>optional .hbase.pb.Get get = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.Builder getGetBuilder() { bitField0_ |= 0x00000004; onChanged(); return getGetFieldBuilder().getBuilder(); } /** * <code>optional .hbase.pb.Get get = 3;</code> */ public 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() { if (getBuilder_ != null) { return getBuilder_.getMessageOrBuilder(); } else { return get_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.getDefaultInstance() : get_; } } /** * <code>optional .hbase.pb.Get get = 3;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetOrBuilder> getGetFieldBuilder() { if (getBuilder_ == null) { getBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetOrBuilder>( getGet(), getParentForChildren(), isClean()); get_ = null; } return getBuilder_; } private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall serviceCall_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder> serviceCallBuilder_; /** * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code> */ public boolean hasServiceCall() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall getServiceCall() { if (serviceCallBuilder_ == null) { return 
serviceCall_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance() : serviceCall_; } else { return serviceCallBuilder_.getMessage(); } } /** * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code> */ public Builder setServiceCall(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall value) { if (serviceCallBuilder_ == null) { if (value == null) { throw new NullPointerException(); } serviceCall_ = value; onChanged(); } else { serviceCallBuilder_.setMessage(value); } bitField0_ |= 0x00000008; return this; } /** * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code> */ public Builder setServiceCall( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder builderForValue) { if (serviceCallBuilder_ == null) { serviceCall_ = builderForValue.build(); onChanged(); } else { serviceCallBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000008; return this; } /** * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code> */ public Builder mergeServiceCall(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall value) { if (serviceCallBuilder_ == null) { if (((bitField0_ & 0x00000008) == 0x00000008) && serviceCall_ != null && serviceCall_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance()) { serviceCall_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.newBuilder(serviceCall_).mergeFrom(value).buildPartial(); } else { serviceCall_ = value; } onChanged(); } else { serviceCallBuilder_.mergeFrom(value); } bitField0_ |= 0x00000008; return this; } /** * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code> */ public Builder clearServiceCall() { if (serviceCallBuilder_ == null) { serviceCall_ = null; onChanged(); } else { 
serviceCallBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000008); return this; } /** * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder getServiceCallBuilder() { bitField0_ |= 0x00000008; onChanged(); return getServiceCallFieldBuilder().getBuilder(); } /** * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getServiceCallOrBuilder() { if (serviceCallBuilder_ != null) { return serviceCallBuilder_.getMessageOrBuilder(); } else { return serviceCall_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance() : serviceCall_; } } /** * <code>optional .hbase.pb.CoprocessorServiceCall service_call = 4;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder> getServiceCallFieldBuilder() { if (serviceCallBuilder_ == null) { serviceCallBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder>( getServiceCall(), getParentForChildren(), isClean()); serviceCall_ = null; } return serviceCallBuilder_; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return 
super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.Action) } // @@protoc_insertion_point(class_scope:hbase.pb.Action) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Action> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<Action>() { public Action parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new Action(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Action> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Action> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface RegionActionOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.RegionAction) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ boolean hasRegion(); /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); /** * <code>required .hbase.pb.RegionSpecifier region = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); /** * <pre> * When set, run mutations as atomic unit. * </pre> * * <code>optional bool atomic = 2;</code> */ boolean hasAtomic(); /** * <pre> * When set, run mutations as atomic unit. * </pre> * * <code>optional bool atomic = 2;</code> */ boolean getAtomic(); /** * <code>repeated .hbase.pb.Action action = 3;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action> getActionList(); /** * <code>repeated .hbase.pb.Action action = 3;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action getAction(int index); /** * <code>repeated .hbase.pb.Action action = 3;</code> */ int getActionCount(); /** * <code>repeated .hbase.pb.Action action = 3;</code> */ java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ActionOrBuilder> getActionOrBuilderList(); /** * <code>repeated .hbase.pb.Action action = 3;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ActionOrBuilder getActionOrBuilder( int index); } /** * <pre> ** * Actions to run against a Region. * </pre> * * Protobuf type {@code hbase.pb.RegionAction} */ public static final class RegionAction extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.RegionAction) RegionActionOrBuilder { // Use RegionAction.newBuilder() to construct. 
// NOTE(review): protoc-generated body of RegionAction (required region,
// optional atomic flag, repeated action). Re-flowed for readability with
// review comments only; all code tokens are unchanged. Do not hand-edit —
// regenerate from Client.proto.
private RegionAction(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// Default-instance state: no fields set, empty action list.
private RegionAction() {
  atomic_ = false;
  action_ = java.util.Collections.emptyList();
}

@java.lang.Override
public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
  return this.unknownFields;
}
// Wire-format parsing constructor; bitField0_ records which fields were seen,
// unrecognized tags are preserved in unknownFields.
private RegionAction(
    org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
  this();
  int mutable_bitField0_ = 0;
  org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:  // end of stream
          done = true;
          break;
        default: {
          if (!parseUnknownField(input, unknownFields,
                                 extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
        case 10: {  // field 1 (region), length-delimited
          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
          if (((bitField0_ & 0x00000001) == 0x00000001)) {
            subBuilder = region_.toBuilder();
          }
          region_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
          if (subBuilder != null) {
            // Repeated occurrence of a singular message field: merge, per proto semantics.
            subBuilder.mergeFrom(region_);
            region_ = subBuilder.buildPartial();
          }
          bitField0_ |= 0x00000001;
          break;
        }
        case 16: {  // field 2 (atomic), varint
          bitField0_ |= 0x00000002;
          atomic_ = input.readBool();
          break;
        }
        case 26: {  // field 3 (action), length-delimited; list allocated lazily
          if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
            action_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action>();
            mutable_bitField0_ |= 0x00000004;
          }
          action_.add(
              input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action.PARSER, extensionRegistry));
          break;
        }
      }
    }
  } catch
  (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
        e).setUnfinishedMessage(this);
  } finally {
    if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
      action_ = java.util.Collections.unmodifiableList(action_);
    }
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionAction_descriptor;
}

protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionAction_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction.Builder.class);
}

// Presence bits: 0x1 = region set, 0x2 = atomic set (the repeated action field
// has no presence bit on the message).
private int bitField0_;
public static final int REGION_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_;
/**
 * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
 */
public boolean hasRegion() {
  return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
  return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_;
}
/**
 * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
  return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_;
}

public static final int ATOMIC_FIELD_NUMBER = 2;
private boolean atomic_;
/**
 * <pre>
 * When set, run mutations as atomic unit.
 * </pre>
 *
 * <code>optional bool atomic = 2;</code>
 */
public boolean hasAtomic() {
  return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <pre>
 * When set, run mutations as atomic unit.
 * </pre>
 *
 * <code>optional bool atomic = 2;</code>
 */
public boolean getAtomic() {
  return atomic_;
}

public static final int ACTION_FIELD_NUMBER = 3;
private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action> action_;
/**
 * <code>repeated .hbase.pb.Action action = 3;</code>
 */
public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action> getActionList() {
  return action_;
}
/**
 * <code>repeated .hbase.pb.Action action = 3;</code>
 */
public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ActionOrBuilder>
    getActionOrBuilderList() {
  return action_;
}
/**
 * <code>repeated .hbase.pb.Action action = 3;</code>
 */
public int getActionCount() {
  return action_.size();
}
/**
 * <code>repeated .hbase.pb.Action action = 3;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action getAction(int index) {
  return action_.get(index);
}
/**
 * <code>repeated .hbase.pb.Action action = 3;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ActionOrBuilder getActionOrBuilder(
    int index) {
  return action_.get(index);
}

// Memoized isInitialized() result: -1 unknown, 0 false, 1 true.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  if (!hasRegion()) {  // region is required
    memoizedIsInitialized = 0;
    return false;
  }
  if (!getRegion().isInitialized()) {
    memoizedIsInitialized = 0;
    return false;
  }
  for (int i = 0; i < getActionCount(); i++) {
    if (!getAction(i).isInitialized()) {
      memoizedIsInitialized = 0;
      return false;
    }
  }
  memoizedIsInitialized = 1;
  return true;
}

public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    output.writeMessage(1, getRegion());
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    output.writeBool(2, atomic_);
  }
  for (int i = 0; i < action_.size(); i++) {
    output.writeMessage(3, action_.get(i));
  }
  unknownFields.writeTo(output);
}

public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
      .computeMessageSize(1, getRegion());
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
      .computeBoolSize(2, atomic_);
  }
  for (int i = 0; i < action_.size(); i++) {
    size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
      .computeMessageSize(3, action_.get(i));
  }
  size += unknownFields.getSerializedSize();
  memoizedSize = size;
  return size;
}

private static final long serialVersionUID = 0L;
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction) obj;

  boolean result = true;
  result = result && (hasRegion() == other.hasRegion());
  if (hasRegion()) {
    result = result && getRegion()
        .equals(other.getRegion());
  }
  result = result && (hasAtomic() == other.hasAtomic());
  if (hasAtomic()) {
    result = result && (getAtomic()
        == other.getAtomic());
  }
  result = result && getActionList()
      .equals(other.getActionList());
  result = result && unknownFields.equals(other.unknownFields);
  return result;
}

@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (hasRegion()) {
    hash = (37 * hash) + REGION_FIELD_NUMBER;
    hash = (53 * hash) + getRegion().hashCode();
  }
  if (hasAtomic()) {
    hash = (37 * hash) + ATOMIC_FIELD_NUMBER;
    hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean(
        getAtomic());
  }
  if (getActionCount() > 0) {
    hash = (37 * hash) + ACTION_FIELD_NUMBER;
    hash = (53 * hash) + getActionList().hashCode();
  }
  hash = (29 * hash) + unknownFields.hashCode();
  memoizedHashCode = hash;
  return hash;
}

// Standard generated parse entry points delegating to PARSER.
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction parseFrom(
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
    throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction parseFrom(
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction parseFrom(byte[] data)
    throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction parseFrom(
    byte[] data,
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction parseFrom(
    java.io.InputStream input,
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction parseDelimitedFrom(
    java.io.InputStream input,
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction parseFrom(
    org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction parseFrom(
    org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}

public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE
      ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 * <pre>
 **
 * Actions to run against a Region.
 * </pre>
 *
 * Protobuf type {@code hbase.pb.RegionAction}
 */
public static final class Builder extends
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
    // @@protoc_insertion_point(builder_implements:hbase.pb.RegionAction)
    org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionOrBuilder {
  public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionAction_descriptor;
  }

  protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionAction_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction.Builder.class);
  }

  // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }

  private Builder(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }
  private void maybeForceBuilderInitialization() {
    if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
            .alwaysUseFieldBuilders) {
      getRegionFieldBuilder();
      getActionFieldBuilder();
    }
  }
  public Builder clear() {
    super.clear();
    if (regionBuilder_ == null) {
      region_ = null;
    } else {
      regionBuilder_.clear();
    }
    bitField0_ = (bitField0_ & ~0x00000001);
    atomic_ = false;
    bitField0_ = (bitField0_ & ~0x00000002);
    if (actionBuilder_ == null) {
      action_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000004);
    } else {
      actionBuilder_.clear();
    }
    return this;
  }

  public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
      getDescriptorForType() {
    return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionAction_descriptor;
  }

  public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction getDefaultInstanceForType() {
    return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction.getDefaultInstance();
  }

  public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction build() {
    org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  // Builds without enforcing required-field initialization.
  public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction buildPartial() {
    org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction(this);
    int from_bitField0_ = bitField0_;
    int to_bitField0_ = 0;
    if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
      to_bitField0_ |= 0x00000001;
    }
    if (regionBuilder_ == null) {
      result.region_ = region_;
    } else {
      result.region_ = regionBuilder_.build();
    }
    if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
      to_bitField0_ |= 0x00000002;
    }
    result.atomic_ = atomic_;
    if (actionBuilder_ == null) {
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        action_ = java.util.Collections.unmodifiableList(action_);
        bitField0_ = (bitField0_ & ~0x00000004);
      }
      result.action_ = action_;
    } else {
      result.action_ = actionBuilder_.build();
    }
    result.bitField0_ = to_bitField0_;
    onBuilt();
    return result;
  }

  public Builder clone() {
    return (Builder) super.clone();
  }
  public Builder setField(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
      Object value) {
    return (Builder) super.setField(field, value);
  }
  public Builder clearField(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
    return (Builder) super.clearField(field);
  }
  public Builder clearOneof(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return (Builder) super.clearOneof(oneof);
  }
  public Builder setRepeatedField(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
      int index, Object value) {
    return (Builder) super.setRepeatedField(field, index, value);
  }
  public Builder addRepeatedField(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
      Object value) {
    return (Builder) super.addRepeatedField(field, value);
  }
  public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
    if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction) {
      return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction)other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction other) {
    if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction.getDefaultInstance()) return this;
    if (other.hasRegion()) {
      mergeRegion(other.getRegion());
    }
    if (other.hasAtomic()) {
      setAtomic(other.getAtomic());
    }
    if (actionBuilder_ == null) {
      if (!other.action_.isEmpty()) {
        if (action_.isEmpty()) {
          // Adopt the other list directly (it is immutable on a built message).
          action_ = other.action_;
          bitField0_ = (bitField0_ & ~0x00000004);
        } else {
          ensureActionIsMutable();
          action_.addAll(other.action_);
        }
        onChanged();
      }
    } else {
      if (!other.action_.isEmpty()) {
        if (actionBuilder_.isEmpty()) {
          actionBuilder_.dispose();
          actionBuilder_ = null;
          action_ = other.action_;
          bitField0_ = (bitField0_ & ~0x00000004);
          actionBuilder_ =
            org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
               getActionFieldBuilder() : null;
        } else {
          actionBuilder_.addAllMessages(other.action_);
        }
      }
    }
    this.mergeUnknownFields(other.unknownFields);
    onChanged();
    return this;
  }

  public final boolean isInitialized() {
    if (!hasRegion()) {
      return false;
    }
    if (!getRegion().isInitialized()) {
      return false;
    }
    for (int i = 0; i < getActionCount(); i++) {
      if (!getAction(i).isInitialized()) {
        return false;
      }
    }
    return true;
  }

  public Builder mergeFrom(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction parsedMessage = null;
    try {
      parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
    } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
      parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction) e.getUnfinishedMessage();
      throw e.unwrapIOException();
    } finally {
      // Merge whatever was parsed before a failure, per generated-code contract.
      if (parsedMessage != null) {
        mergeFrom(parsedMessage);
      }
    }
    return this;
  }
  private int bitField0_;

  private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier region_ = null;
  private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
      org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
  /**
   * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
   */
  public boolean hasRegion() {
    return ((bitField0_ & 0x00000001) == 0x00000001);
  }
  /**
   * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
   */
  public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
    if (regionBuilder_ == null) {
      return region_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_;
    } else {
      return regionBuilder_.getMessage();
    }
  }
  /**
   * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
   */
  public Builder setRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) {
    if (regionBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      region_ = value;
      onChanged();
    } else {
      regionBuilder_.setMessage(value);
    }
    bitField0_ |= 0x00000001;
    return this;
  }
  /**
   * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
   */
  public Builder setRegion(
      org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
    if (regionBuilder_ == null) {
      region_ = builderForValue.build();
      onChanged();
    } else {
      regionBuilder_.setMessage(builderForValue.build());
    }
    bitField0_ |= 0x00000001;
    return this;
  }
  /**
   * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
   */
  public Builder mergeRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) {
    if (regionBuilder_ == null) {
      if (((bitField0_ & 0x00000001) == 0x00000001) &&
          region_ != null &&
          region_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
        region_ =
          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
      } else {
        region_ = value;
      }
      onChanged();
    } else {
      regionBuilder_.mergeFrom(value);
    }
    bitField0_ |= 0x00000001;
    return this;
  }
  /**
   * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
   */
  public Builder clearRegion() {
    if (regionBuilder_ == null) {
      region_ = null;
      onChanged();
    } else {
      regionBuilder_.clear();
    }
    bitField0_ = (bitField0_ & ~0x00000001);
    return this;
  }
  /**
   * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
   */
  public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder
      getRegionBuilder() {
    bitField0_ |= 0x00000001;
    onChanged();
    return getRegionFieldBuilder().getBuilder();
  }
  /**
   * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
   */
  public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
    if (regionBuilder_ != null) {
      return regionBuilder_.getMessageOrBuilder();
    } else {
      return region_ == null ?
          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : region_;
    }
  }
  /**
   * <code>required .hbase.pb.RegionSpecifier region = 1;</code>
   */
  private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
      org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
      getRegionFieldBuilder() {
    if (regionBuilder_ == null) {
      regionBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
              getRegion(),
              getParentForChildren(),
              isClean());
      region_ = null;
    }
    return regionBuilder_;
  }

  private boolean atomic_ ;
  /**
   * <pre>
   * When set, run mutations as atomic unit.
   * </pre>
   *
   * <code>optional bool atomic = 2;</code>
   */
  public boolean hasAtomic() {
    return ((bitField0_ & 0x00000002) == 0x00000002);
  }
  /**
   * <pre>
   * When set, run mutations as atomic unit.
   * </pre>
   *
   * <code>optional bool atomic = 2;</code>
   */
  public boolean getAtomic() {
    return atomic_;
  }
  /**
   * <pre>
   * When set, run mutations as atomic unit.
   * </pre>
   *
   * <code>optional bool atomic = 2;</code>
   */
  public Builder setAtomic(boolean value) {
    bitField0_ |= 0x00000002;
    atomic_ = value;
    onChanged();
    return this;
  }
  /**
   * <pre>
   * When set, run mutations as atomic unit.
   * </pre>
   *
   * <code>optional bool atomic = 2;</code>
   */
  public Builder clearAtomic() {
    bitField0_ = (bitField0_ & ~0x00000002);
    atomic_ = false;
    onChanged();
    return this;
  }

  private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action> action_ =
    java.util.Collections.emptyList();
  // Copy-on-write: replace the (possibly shared) list with a private copy
  // before the first mutation.
  private void ensureActionIsMutable() {
    if (!((bitField0_ & 0x00000004) == 0x00000004)) {
      action_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action>(action_);
      bitField0_ |= 0x00000004;
    }
  }

  private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3<
      org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ActionOrBuilder> actionBuilder_;

  /**
   * <code>repeated .hbase.pb.Action action = 3;</code>
   */
  public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action> getActionList() {
    if (actionBuilder_ == null) {
      return java.util.Collections.unmodifiableList(action_);
    } else {
      return actionBuilder_.getMessageList();
    }
  }
  /**
   * <code>repeated .hbase.pb.Action action = 3;</code>
   */
  public int getActionCount() {
    if (actionBuilder_ == null) {
      return action_.size();
    } else {
      return actionBuilder_.getCount();
    }
  }
  /**
   * <code>repeated .hbase.pb.Action action = 3;</code>
   */
  public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action getAction(int index) {
    if (actionBuilder_ == null) {
      return action_.get(index);
    } else {
      return actionBuilder_.getMessage(index);
    }
  }
  /**
   * <code>repeated .hbase.pb.Action action = 3;</code>
   */
  public Builder setAction(
      int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action value) {
    if (actionBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureActionIsMutable();
      action_.set(index, value);
      onChanged();
    } else {
      actionBuilder_.setMessage(index, value);
    }
    return this;
  }
  /**
   * <code>repeated .hbase.pb.Action action = 3;</code>
   */
  public Builder setAction(
      int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action.Builder builderForValue) {
    if (actionBuilder_ == null) {
      ensureActionIsMutable();
      action_.set(index, builderForValue.build());
      onChanged();
    } else {
      actionBuilder_.setMessage(index, builderForValue.build());
    }
    return this;
  }
  /**
   * <code>repeated .hbase.pb.Action action = 3;</code>
   */
  public Builder addAction(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action value) {
    if (actionBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureActionIsMutable();
      action_.add(value);
      onChanged();
    } else {
      actionBuilder_.addMessage(value);
    }
    return this;
  }
  /**
   * <code>repeated .hbase.pb.Action action = 3;</code>
   */
  public Builder addAction(
      int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action value) {
    if (actionBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureActionIsMutable();
      action_.add(index, value);
      onChanged();
    } else {
      actionBuilder_.addMessage(index, value);
    }
    return this;
  }
  /**
   * <code>repeated .hbase.pb.Action action = 3;</code>
   */
  public Builder addAction(
      org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action.Builder builderForValue) {
    if (actionBuilder_ == null) {
      ensureActionIsMutable();
      action_.add(builderForValue.build());
      onChanged();
    } else {
      actionBuilder_.addMessage(builderForValue.build());
    }
    return this;
  }
  /**
   * <code>repeated .hbase.pb.Action action = 3;</code>
   */
  public Builder addAction(
      int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action.Builder builderForValue) {
    if (actionBuilder_ == null) {
      ensureActionIsMutable();
      action_.add(index, builderForValue.build());
      onChanged();
    } else {
      actionBuilder_.addMessage(index, builderForValue.build());
    }
    return this;
  }
  /**
   * <code>repeated .hbase.pb.Action action = 3;</code>
   */
  public Builder addAllAction(
      java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action> values) {
    if (actionBuilder_ == null) {
      ensureActionIsMutable();
      org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll(
          values, action_);
      onChanged();
    } else {
      actionBuilder_.addAllMessages(values);
    }
    return this;
  }
  /**
   * <code>repeated .hbase.pb.Action action = 3;</code>
   */
  public Builder clearAction() {
    if (actionBuilder_ == null) {
      action_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
    } else {
      actionBuilder_.clear();
    }
    return this;
  }
  /**
   * <code>repeated .hbase.pb.Action action = 3;</code>
   */
  public Builder removeAction(int index) {
    if (actionBuilder_ == null) {
      ensureActionIsMutable();
      action_.remove(index);
      onChanged();
    } else {
      actionBuilder_.remove(index);
    }
    return this;
  }
  /**
   * <code>repeated .hbase.pb.Action action = 3;</code>
   */
  public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action.Builder getActionBuilder(
      int index) {
    return getActionFieldBuilder().getBuilder(index);
  }
  /**
   * <code>repeated .hbase.pb.Action action = 3;</code>
   */
  public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ActionOrBuilder getActionOrBuilder(
      int index) {
    if (actionBuilder_ == null) {
      return action_.get(index);
    } else {
      return actionBuilder_.getMessageOrBuilder(index);
    }
  }
  /**
   * <code>repeated .hbase.pb.Action action = 3;</code>
   */
  public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ActionOrBuilder>
       getActionOrBuilderList() {
    if (actionBuilder_ != null) {
      return actionBuilder_.getMessageOrBuilderList();
    } else {
      return java.util.Collections.unmodifiableList(action_);
    }
  }
  /**
   * <code>repeated .hbase.pb.Action action = 3;</code>
   */
  public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action.Builder addActionBuilder() {
    return getActionFieldBuilder().addBuilder(
        org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action.getDefaultInstance());
  }
  /**
   * <code>repeated .hbase.pb.Action action = 3;</code>
   */
  public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action.Builder addActionBuilder(
      int index) {
    return getActionFieldBuilder().addBuilder(
        index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action.getDefaultInstance());
  }
  /**
   * <code>repeated .hbase.pb.Action action = 3;</code>
   */
  public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action.Builder>
       getActionBuilderList() {
    return getActionFieldBuilder().getBuilderList();
  }
  private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3<
      org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ActionOrBuilder>
      getActionFieldBuilder() {
    if (actionBuilder_ == null) {
      actionBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ActionOrBuilder>(
              action_,
              ((bitField0_ & 0x00000004) == 0x00000004),
              getParentForChildren(),
              isClean());
      action_ = null;
    }
    return actionBuilder_;
  }
  public final Builder setUnknownFields(
      final
org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
  // NOTE(review): protoc-generated code — re-flowed with review comments only;
  // all code tokens unchanged. Regenerate from Client.proto for real changes.
  return super.setUnknownFields(unknownFields);
}
public final Builder mergeUnknownFields(
    final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}

// @@protoc_insertion_point(builder_scope:hbase.pb.RegionAction)
}

// @@protoc_insertion_point(class_scope:hbase.pb.RegionAction)
// Shared immutable default (all-fields-unset) RegionAction instance.
private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction();
}

public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Deprecated public field; callers should use parser() instead.
@java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<RegionAction>
    PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<RegionAction>() {
  public RegionAction parsePartialFrom(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      return new RegionAction(input, extensionRegistry);
  }
};

public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<RegionAction> parser() {
  return PARSER;
}

@java.lang.Override
public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<RegionAction> getParserForType() {
  return PARSER;
}

public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}

}

// Read-only accessor view over hbase.pb.RegionLoadStats, implemented by both
// the immutable message and its Builder.
public interface RegionLoadStatsOrBuilder extends
    // @@protoc_insertion_point(interface_extends:hbase.pb.RegionLoadStats)
    org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {

  /**
   * <pre>
   * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
   * </pre>
   *
   * <code>optional int32 memstoreLoad = 1 [default = 0];</code>
   */
  boolean hasMemstoreLoad();
  /**
   * <pre>
   * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
   * </pre>
   *
   * <code>optional int32 memstoreLoad = 1 [default = 0];</code>
   */
  int getMemstoreLoad();

  /**
   * <pre>
   * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
   * We can move this to "ServerLoadStats" should we develop them.
   * </pre>
   *
   * <code>optional int32 heapOccupancy = 2 [default = 0];</code>
   */
  boolean hasHeapOccupancy();
  /**
   * <pre>
   * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
   * We can move this to "ServerLoadStats" should we develop them.
   * </pre>
   *
   * <code>optional int32 heapOccupancy = 2 [default = 0];</code>
   */
  int getHeapOccupancy();

  /**
   * <pre>
   * Compaction pressure. Guaranteed to be positive, between 0 and 100.
   * </pre>
   *
   * <code>optional int32 compactionPressure = 3 [default = 0];</code>
   */
  boolean hasCompactionPressure();
  /**
   * <pre>
   * Compaction pressure. Guaranteed to be positive, between 0 and 100.
   * </pre>
   *
   * <code>optional int32 compactionPressure = 3 [default = 0];</code>
   */
  int getCompactionPressure();
}

/**
 * <pre>
 * Statistics about the current load on the region
 * </pre>
 *
 * Protobuf type {@code hbase.pb.RegionLoadStats}
 */
public static final class RegionLoadStats extends
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:hbase.pb.RegionLoadStats)
    RegionLoadStatsOrBuilder {
  // Use RegionLoadStats.newBuilder() to construct.
// Construct from a populated Builder (standard generated-message pattern).
private RegionLoadStats(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// No-arg constructor: every field starts at its proto default (0).
private RegionLoadStats() {
  memstoreLoad_ = 0;
  heapOccupancy_ = 0;
  compactionPressure_ = 0;
}
@java.lang.Override
public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
  return this.unknownFields;
}
// Wire-format parsing constructor: reads tag/value pairs until end of stream
// (tag == 0), setting the matching presence bit in bitField0_ for each known
// field and preserving unrecognized fields in the UnknownFieldSet.
private RegionLoadStats(
    org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
  this();
  int mutable_bitField0_ = 0;
  org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0: // end of message
          done = true;
          break;
        default: {
          if (!parseUnknownField(input, unknownFields,
                                 extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
        case 8: { // tag 8 = field 1, varint: memstoreLoad
          bitField0_ |= 0x00000001;
          memstoreLoad_ = input.readInt32();
          break;
        }
        case 16: { // tag 16 = field 2, varint: heapOccupancy
          bitField0_ |= 0x00000002;
          heapOccupancy_ = input.readInt32();
          break;
        }
        case 24: { // tag 24 = field 3, varint: compactionPressure
          bitField0_ |= 0x00000004;
          compactionPressure_ = input.readInt32();
          break;
        }
      }
    }
  } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
    // Attach the partially parsed message so callers can inspect it.
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
        e).setUnfinishedMessage(this);
  } finally {
    // Always freeze unknown fields and extensions, even on failure.
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
  return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionLoadStats_descriptor;
}
protected
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionLoadStats_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.Builder.class); } private int bitField0_; public static final int MEMSTORELOAD_FIELD_NUMBER = 1; private int memstoreLoad_; /** * <pre> * Percent load on the memstore. Guaranteed to be positive, between 0 and 100. * </pre> * * <code>optional int32 memstoreLoad = 1 [default = 0];</code> */ public boolean hasMemstoreLoad() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * Percent load on the memstore. Guaranteed to be positive, between 0 and 100. * </pre> * * <code>optional int32 memstoreLoad = 1 [default = 0];</code> */ public int getMemstoreLoad() { return memstoreLoad_; } public static final int HEAPOCCUPANCY_FIELD_NUMBER = 2; private int heapOccupancy_; /** * <pre> * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100. * We can move this to "ServerLoadStats" should we develop them. * </pre> * * <code>optional int32 heapOccupancy = 2 [default = 0];</code> */ public boolean hasHeapOccupancy() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <pre> * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100. * We can move this to "ServerLoadStats" should we develop them. * </pre> * * <code>optional int32 heapOccupancy = 2 [default = 0];</code> */ public int getHeapOccupancy() { return heapOccupancy_; } public static final int COMPACTIONPRESSURE_FIELD_NUMBER = 3; private int compactionPressure_; /** * <pre> * Compaction pressure. Guaranteed to be positive, between 0 and 100. 
* </pre>
 *
 * <code>optional int32 compactionPressure = 3 [default = 0];</code>
 */
public boolean hasCompactionPressure() {
  return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <pre>
 * Compaction pressure. Guaranteed to be positive, between 0 and 100.
 * </pre>
 *
 * <code>optional int32 compactionPressure = 3 [default = 0];</code>
 */
public int getCompactionPressure() {
  return compactionPressure_;
}
// Memoized initialization state: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // This message declares no required fields, so it is always initialized.
  memoizedIsInitialized = 1;
  return true;
}
// Writes only the fields whose presence bit is set, then any unknown fields.
public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
                    throws java.io.IOException {
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    output.writeInt32(1, memstoreLoad_);
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    output.writeInt32(2, heapOccupancy_);
  }
  if (((bitField0_ & 0x00000004) == 0x00000004)) {
    output.writeInt32(3, compactionPressure_);
  }
  unknownFields.writeTo(output);
}
// Computes the serialized byte size, mirroring writeTo's field selection;
// the result is cached in memoizedSize (-1 means not yet computed).
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
      .computeInt32Size(1, memstoreLoad_);
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
      .computeInt32Size(2, heapOccupancy_);
  }
  if (((bitField0_ & 0x00000004) == 0x00000004)) {
    size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
      .computeInt32Size(3, compactionPressure_);
  }
  size += unknownFields.getSerializedSize();
  memoizedSize = size;
  return size;
}
private static final long serialVersionUID = 0L;
// Value equality: same presence bits, same field values, same unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats) obj; boolean result = true; result = result && (hasMemstoreLoad() == other.hasMemstoreLoad()); if (hasMemstoreLoad()) { result = result && (getMemstoreLoad() == other.getMemstoreLoad()); } result = result && (hasHeapOccupancy() == other.hasHeapOccupancy()); if (hasHeapOccupancy()) { result = result && (getHeapOccupancy() == other.getHeapOccupancy()); } result = result && (hasCompactionPressure() == other.hasCompactionPressure()); if (hasCompactionPressure()) { result = result && (getCompactionPressure() == other.getCompactionPressure()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasMemstoreLoad()) { hash = (37 * hash) + MEMSTORELOAD_FIELD_NUMBER; hash = (53 * hash) + getMemstoreLoad(); } if (hasHeapOccupancy()) { hash = (37 * hash) + HEAPOCCUPANCY_FIELD_NUMBER; hash = (53 * hash) + getHeapOccupancy(); } if (hasCompactionPressure()) { hash = (37 * hash) + COMPACTIONPRESSURE_FIELD_NUMBER; hash = (53 * hash) + getCompactionPressure(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Statistics about the current load on the region * </pre> * * Protobuf type {@code hbase.pb.RegionLoadStats} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.RegionLoadStats) org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionLoadStats_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionLoadStats_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); memstoreLoad_ = 0; bitField0_ = (bitField0_ & ~0x00000001); 
// (continuation of clear(): reset remaining fields and their presence bits)
heapOccupancy_ = 0;
bitField0_ = (bitField0_ & ~0x00000002);
compactionPressure_ = 0;
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
  return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionLoadStats_descriptor;
}
public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats getDefaultInstanceForType() {
  return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance();
}
// build() = buildPartial() plus an initialization check; with no required
// fields the check never fails for this message type.
public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats build() {
  org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}
// Copies each field value into the new message unconditionally, but only
// transfers the presence bit when it was set on the builder.
public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats buildPartial() {
  org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats(this);
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
  if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
    to_bitField0_ |= 0x00000001;
  }
  result.memstoreLoad_ = memstoreLoad_;
  if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
    to_bitField0_ |= 0x00000002;
  }
  result.heapOccupancy_ = heapOccupancy_;
  if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
    to_bitField0_ |= 0x00000004;
  }
  result.compactionPressure_ = compactionPressure_;
  result.bitField0_ = to_bitField0_;
  onBuilt();
  return result;
}
public Builder clone() {
  return (Builder) super.clone();
}
// Reflective field mutators simply delegate to GeneratedMessageV3.Builder.
public Builder setField(
    org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
    Object value) {
  return (Builder) super.setField(field, value);
}
public Builder clearField(
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
  return (Builder) super.clearField(field);
}
public Builder clearOneof(
    org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
    org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
    int index, Object value) {
  return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
    org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
    Object value) {
  return (Builder) super.addRepeatedField(field, value);
}
// Generic Message merge: dispatch to the typed overload when possible,
// otherwise fall back to reflective field-by-field merging.
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
  if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats) {
    return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats)other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}
// Typed merge: copies only the fields that are present on 'other' (standard
// proto2 merge semantics), then merges its unknown fields.
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats other) {
  if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance()) return this;
  if (other.hasMemstoreLoad()) {
    setMemstoreLoad(other.getMemstoreLoad());
  }
  if (other.hasHeapOccupancy()) {
    setHeapOccupancy(other.getHeapOccupancy());
  }
  if (other.hasCompactionPressure()) {
    setCompactionPressure(other.getCompactionPressure());
  }
  this.mergeUnknownFields(other.unknownFields);
  onChanged();
  return this;
}
// No required fields, so a builder is always buildable.
public final boolean isInitialized() {
  return true;
}
// Stream merge: parse a full message, then merge whatever was parsed —
// even on failure the partial message (if any) is merged before rethrowing.
public Builder mergeFrom(
    org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats parsedMessage = null;
  try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private int memstoreLoad_ ; /** * <pre> * Percent load on the memstore. Guaranteed to be positive, between 0 and 100. * </pre> * * <code>optional int32 memstoreLoad = 1 [default = 0];</code> */ public boolean hasMemstoreLoad() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * Percent load on the memstore. Guaranteed to be positive, between 0 and 100. * </pre> * * <code>optional int32 memstoreLoad = 1 [default = 0];</code> */ public int getMemstoreLoad() { return memstoreLoad_; } /** * <pre> * Percent load on the memstore. Guaranteed to be positive, between 0 and 100. * </pre> * * <code>optional int32 memstoreLoad = 1 [default = 0];</code> */ public Builder setMemstoreLoad(int value) { bitField0_ |= 0x00000001; memstoreLoad_ = value; onChanged(); return this; } /** * <pre> * Percent load on the memstore. Guaranteed to be positive, between 0 and 100. * </pre> * * <code>optional int32 memstoreLoad = 1 [default = 0];</code> */ public Builder clearMemstoreLoad() { bitField0_ = (bitField0_ & ~0x00000001); memstoreLoad_ = 0; onChanged(); return this; } private int heapOccupancy_ ; /** * <pre> * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100. * We can move this to "ServerLoadStats" should we develop them. * </pre> * * <code>optional int32 heapOccupancy = 2 [default = 0];</code> */ public boolean hasHeapOccupancy() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <pre> * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100. 
* We can move this to "ServerLoadStats" should we develop them. * </pre> * * <code>optional int32 heapOccupancy = 2 [default = 0];</code> */ public int getHeapOccupancy() { return heapOccupancy_; } /** * <pre> * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100. * We can move this to "ServerLoadStats" should we develop them. * </pre> * * <code>optional int32 heapOccupancy = 2 [default = 0];</code> */ public Builder setHeapOccupancy(int value) { bitField0_ |= 0x00000002; heapOccupancy_ = value; onChanged(); return this; } /** * <pre> * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100. * We can move this to "ServerLoadStats" should we develop them. * </pre> * * <code>optional int32 heapOccupancy = 2 [default = 0];</code> */ public Builder clearHeapOccupancy() { bitField0_ = (bitField0_ & ~0x00000002); heapOccupancy_ = 0; onChanged(); return this; } private int compactionPressure_ ; /** * <pre> * Compaction pressure. Guaranteed to be positive, between 0 and 100. * </pre> * * <code>optional int32 compactionPressure = 3 [default = 0];</code> */ public boolean hasCompactionPressure() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <pre> * Compaction pressure. Guaranteed to be positive, between 0 and 100. * </pre> * * <code>optional int32 compactionPressure = 3 [default = 0];</code> */ public int getCompactionPressure() { return compactionPressure_; } /** * <pre> * Compaction pressure. Guaranteed to be positive, between 0 and 100. * </pre> * * <code>optional int32 compactionPressure = 3 [default = 0];</code> */ public Builder setCompactionPressure(int value) { bitField0_ |= 0x00000004; compactionPressure_ = value; onChanged(); return this; } /** * <pre> * Compaction pressure. Guaranteed to be positive, between 0 and 100. 
* </pre> * * <code>optional int32 compactionPressure = 3 [default = 0];</code> */ public Builder clearCompactionPressure() { bitField0_ = (bitField0_ & ~0x00000004); compactionPressure_ = 0; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.RegionLoadStats) } // @@protoc_insertion_point(class_scope:hbase.pb.RegionLoadStats) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<RegionLoadStats> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<RegionLoadStats>() { public RegionLoadStats parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new RegionLoadStats(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<RegionLoadStats> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<RegionLoadStats> getParserForType() { return PARSER; } public 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface MultiRegionLoadStatsOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.MultiRegionLoadStats) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>repeated .hbase.pb.RegionSpecifier region = 1;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier> getRegionList(); /** * <code>repeated .hbase.pb.RegionSpecifier region = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(int index); /** * <code>repeated .hbase.pb.RegionSpecifier region = 1;</code> */ int getRegionCount(); /** * <code>repeated .hbase.pb.RegionSpecifier region = 1;</code> */ java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionOrBuilderList(); /** * <code>repeated .hbase.pb.RegionSpecifier region = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder( int index); /** * <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats> getStatList(); /** * <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats getStat(int index); /** * <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code> */ int getStatCount(); /** * <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code> */ java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder> getStatOrBuilderList(); /** * <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder getStatOrBuilder( int index); } /** * Protobuf type {@code hbase.pb.MultiRegionLoadStats} */ public static final class MultiRegionLoadStats extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.MultiRegionLoadStats) MultiRegionLoadStatsOrBuilder { // Use MultiRegionLoadStats.newBuilder() to construct. private MultiRegionLoadStats(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private MultiRegionLoadStats() { region_ = java.util.Collections.emptyList(); stat_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private MultiRegionLoadStats( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { region_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier>(); 
mutable_bitField0_ |= 0x00000001; } region_.add( input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry)); break; } case 18: { if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { stat_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats>(); mutable_bitField0_ |= 0x00000002; } stat_.add( input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.PARSER, extensionRegistry)); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { region_ = java.util.Collections.unmodifiableList(region_); } if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { stat_ = java.util.Collections.unmodifiableList(stat_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiRegionLoadStats_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiRegionLoadStats_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats.Builder.class); } public static final int REGION_FIELD_NUMBER = 1; private 
java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier> region_; /** * <code>repeated .hbase.pb.RegionSpecifier region = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier> getRegionList() { return region_; } /** * <code>repeated .hbase.pb.RegionSpecifier region = 1;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionOrBuilderList() { return region_; } /** * <code>repeated .hbase.pb.RegionSpecifier region = 1;</code> */ public int getRegionCount() { return region_.size(); } /** * <code>repeated .hbase.pb.RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(int index) { return region_.get(index); } /** * <code>repeated .hbase.pb.RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder( int index) { return region_.get(index); } public static final int STAT_FIELD_NUMBER = 2; private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats> stat_; /** * <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats> getStatList() { return stat_; } /** * <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder> getStatOrBuilderList() { return stat_; } /** * <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code> */ public int getStatCount() { return stat_.size(); } /** * <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats getStat(int index) { return stat_.get(index); } /** * <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder getStatOrBuilder( int index) { return stat_.get(index); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getRegionCount(); i++) { if (!getRegion(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < region_.size(); i++) { output.writeMessage(1, region_.get(i)); } for (int i = 0; i < stat_.size(); i++) { output.writeMessage(2, stat_.get(i)); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < region_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(1, region_.get(i)); } for (int i = 0; i < stat_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(2, stat_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == 
this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats) obj; boolean result = true; result = result && getRegionList() .equals(other.getRegionList()); result = result && getStatList() .equals(other.getStatList()); result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getRegionCount() > 0) { hash = (37 * hash) + REGION_FIELD_NUMBER; hash = (53 * hash) + getRegionList().hashCode(); } if (getStatCount() > 0) { hash = (37 * hash) + STAT_FIELD_NUMBER; hash = (53 * hash) + getStatList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return 
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.MultiRegionLoadStats} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.MultiRegionLoadStats) org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStatsOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiRegionLoadStats_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiRegionLoadStats_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getRegionFieldBuilder(); getStatFieldBuilder(); } } public Builder clear() { super.clear(); if (regionBuilder_ == null) { region_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { regionBuilder_.clear(); } if (statBuilder_ == null) { stat_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); } else { statBuilder_.clear(); } return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiRegionLoadStats_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats result = buildPartial(); if (!result.isInitialized()) { throw 
newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats(this); int from_bitField0_ = bitField0_; if (regionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001)) { region_ = java.util.Collections.unmodifiableList(region_); bitField0_ = (bitField0_ & ~0x00000001); } result.region_ = region_; } else { result.region_ = regionBuilder_.build(); } if (statBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002)) { stat_ = java.util.Collections.unmodifiableList(stat_); bitField0_ = (bitField0_ & ~0x00000002); } result.stat_ = stat_; } else { result.stat_ = statBuilder_.build(); } onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats.getDefaultInstance()) return this; if (regionBuilder_ == null) { if (!other.region_.isEmpty()) { if (region_.isEmpty()) { region_ = other.region_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureRegionIsMutable(); region_.addAll(other.region_); } onChanged(); } } else { if (!other.region_.isEmpty()) { if (regionBuilder_.isEmpty()) { regionBuilder_.dispose(); regionBuilder_ = null; region_ = other.region_; bitField0_ = (bitField0_ & ~0x00000001); regionBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getRegionFieldBuilder() : null; } else { regionBuilder_.addAllMessages(other.region_); } } } if (statBuilder_ == null) { if (!other.stat_.isEmpty()) { if (stat_.isEmpty()) { stat_ = other.stat_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureStatIsMutable(); stat_.addAll(other.stat_); } onChanged(); } } else { if (!other.stat_.isEmpty()) { if (statBuilder_.isEmpty()) { statBuilder_.dispose(); statBuilder_ = null; stat_ = other.stat_; bitField0_ = (bitField0_ & ~0x00000002); statBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getStatFieldBuilder() : null; } else { statBuilder_.addAllMessages(other.stat_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getRegionCount(); i++) { if (!getRegion(i).isInitialized()) { return false; } } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier> region_ = java.util.Collections.emptyList(); private void ensureRegionIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { region_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier>(region_); bitField0_ |= 0x00000001; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; /** * <code>repeated .hbase.pb.RegionSpecifier region = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier> 
getRegionList() { if (regionBuilder_ == null) { return java.util.Collections.unmodifiableList(region_); } else { return regionBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.RegionSpecifier region = 1;</code> */ public int getRegionCount() { if (regionBuilder_ == null) { return region_.size(); } else { return regionBuilder_.getCount(); } } /** * <code>repeated .hbase.pb.RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(int index) { if (regionBuilder_ == null) { return region_.get(index); } else { return regionBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.RegionSpecifier region = 1;</code> */ public Builder setRegion( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRegionIsMutable(); region_.set(index, value); onChanged(); } else { regionBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.RegionSpecifier region = 1;</code> */ public Builder setRegion( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { if (regionBuilder_ == null) { ensureRegionIsMutable(); region_.set(index, builderForValue.build()); onChanged(); } else { regionBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.RegionSpecifier region = 1;</code> */ public Builder addRegion(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRegionIsMutable(); region_.add(value); onChanged(); } else { regionBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.RegionSpecifier region = 1;</code> */ public Builder addRegion( int index, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (regionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRegionIsMutable(); region_.add(index, value); onChanged(); } else { regionBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.RegionSpecifier region = 1;</code> */ public Builder addRegion( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { if (regionBuilder_ == null) { ensureRegionIsMutable(); region_.add(builderForValue.build()); onChanged(); } else { regionBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.RegionSpecifier region = 1;</code> */ public Builder addRegion( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { if (regionBuilder_ == null) { ensureRegionIsMutable(); region_.add(index, builderForValue.build()); onChanged(); } else { regionBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.RegionSpecifier region = 1;</code> */ public Builder addAllRegion( java.lang.Iterable<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier> values) { if (regionBuilder_ == null) { ensureRegionIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, region_); onChanged(); } else { regionBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.RegionSpecifier region = 1;</code> */ public Builder clearRegion() { if (regionBuilder_ == null) { region_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { regionBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.RegionSpecifier region = 1;</code> */ public Builder removeRegion(int index) { if (regionBuilder_ == null) { ensureRegionIsMutable(); region_.remove(index); onChanged(); } else { regionBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder( int index) { return getRegionFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder( int index) { if (regionBuilder_ == null) { return region_.get(index); } else { return regionBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.RegionSpecifier region = 1;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionOrBuilderList() { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(region_); } } /** * <code>repeated .hbase.pb.RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder addRegionBuilder() { return getRegionFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()); } /** * <code>repeated .hbase.pb.RegionSpecifier region = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder addRegionBuilder( int index) { return getRegionFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()); } /** * <code>repeated .hbase.pb.RegionSpecifier region = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder> getRegionBuilderList() { return getRegionFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder() { if (regionBuilder_ == null) { regionBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( region_, ((bitField0_ & 0x00000001) == 0x00000001), 
getParentForChildren(), isClean()); region_ = null; } return regionBuilder_; } private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats> stat_ = java.util.Collections.emptyList(); private void ensureStatIsMutable() { if (!((bitField0_ & 0x00000002) == 0x00000002)) { stat_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats>(stat_); bitField0_ |= 0x00000002; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder> statBuilder_; /** * <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats> getStatList() { if (statBuilder_ == null) { return java.util.Collections.unmodifiableList(stat_); } else { return statBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code> */ public int getStatCount() { if (statBuilder_ == null) { return stat_.size(); } else { return statBuilder_.getCount(); } } /** * <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats getStat(int index) { if (statBuilder_ == null) { return stat_.get(index); } else { return statBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code> */ public Builder setStat( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats value) { if (statBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureStatIsMutable(); stat_.set(index, value); onChanged(); } else { statBuilder_.setMessage(index, value); } return 
this; } /** * <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code> */ public Builder setStat( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.Builder builderForValue) { if (statBuilder_ == null) { ensureStatIsMutable(); stat_.set(index, builderForValue.build()); onChanged(); } else { statBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code> */ public Builder addStat(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats value) { if (statBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureStatIsMutable(); stat_.add(value); onChanged(); } else { statBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code> */ public Builder addStat( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats value) { if (statBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureStatIsMutable(); stat_.add(index, value); onChanged(); } else { statBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code> */ public Builder addStat( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.Builder builderForValue) { if (statBuilder_ == null) { ensureStatIsMutable(); stat_.add(builderForValue.build()); onChanged(); } else { statBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code> */ public Builder addStat( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.Builder builderForValue) { if (statBuilder_ == null) { ensureStatIsMutable(); stat_.add(index, builderForValue.build()); onChanged(); } else { statBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.RegionLoadStats 
stat = 2;</code> */ public Builder addAllStat( java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats> values) { if (statBuilder_ == null) { ensureStatIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, stat_); onChanged(); } else { statBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code> */ public Builder clearStat() { if (statBuilder_ == null) { stat_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { statBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code> */ public Builder removeStat(int index) { if (statBuilder_ == null) { ensureStatIsMutable(); stat_.remove(index); onChanged(); } else { statBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.Builder getStatBuilder( int index) { return getStatFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder getStatOrBuilder( int index) { if (statBuilder_ == null) { return stat_.get(index); } else { return statBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder> getStatOrBuilderList() { if (statBuilder_ != null) { return statBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(stat_); } } /** * <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.Builder addStatBuilder() { return getStatFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance()); } /** * <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.Builder addStatBuilder( int index) { return getStatFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance()); } /** * <code>repeated .hbase.pb.RegionLoadStats stat = 2;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.Builder> getStatBuilderList() { return getStatFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder> getStatFieldBuilder() { if (statBuilder_ == null) { statBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder>( stat_, ((bitField0_ & 0x00000002) == 0x00000002), getParentForChildren(), 
isClean()); stat_ = null; } return statBuilder_; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.MultiRegionLoadStats) } // @@protoc_insertion_point(class_scope:hbase.pb.MultiRegionLoadStats) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<MultiRegionLoadStats> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<MultiRegionLoadStats>() { public MultiRegionLoadStats parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new MultiRegionLoadStats(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<MultiRegionLoadStats> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<MultiRegionLoadStats> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface 
ResultOrExceptionOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.ResultOrException) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <pre> * If part of a multi call, save original index of the list of all * passed so can align this response w/ original request. * </pre> * * <code>optional uint32 index = 1;</code> */ boolean hasIndex(); /** * <pre> * If part of a multi call, save original index of the list of all * passed so can align this response w/ original request. * </pre> * * <code>optional uint32 index = 1;</code> */ int getIndex(); /** * <code>optional .hbase.pb.Result result = 2;</code> */ boolean hasResult(); /** * <code>optional .hbase.pb.Result result = 2;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result getResult(); /** * <code>optional .hbase.pb.Result result = 2;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder(); /** * <code>optional .hbase.pb.NameBytesPair exception = 3;</code> */ boolean hasException(); /** * <code>optional .hbase.pb.NameBytesPair exception = 3;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair getException(); /** * <code>optional .hbase.pb.NameBytesPair exception = 3;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder(); /** * <pre> * result if this was a coprocessor service call * </pre> * * <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code> */ boolean hasServiceResult(); /** * <pre> * result if this was a coprocessor service call * </pre> * * <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult getServiceResult(); /** * <pre> * result if this was a coprocessor service call * </pre> * * <code>optional .hbase.pb.CoprocessorServiceResult service_result = 
4;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder getServiceResultOrBuilder(); /** * <pre> * current load on the region * </pre> * * <code>optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true];</code> */ @java.lang.Deprecated boolean hasLoadStats(); /** * <pre> * current load on the region * </pre> * * <code>optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true];</code> */ @java.lang.Deprecated org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats getLoadStats(); /** * <pre> * current load on the region * </pre> * * <code>optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true];</code> */ @java.lang.Deprecated org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder getLoadStatsOrBuilder(); } /** * <pre> ** * Either a Result or an Exception NameBytesPair (keyed by * exception name whose value is the exception stringified) * or maybe empty if no result and no exception. * </pre> * * Protobuf type {@code hbase.pb.ResultOrException} */ public static final class ResultOrException extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.ResultOrException) ResultOrExceptionOrBuilder { // Use ResultOrException.newBuilder() to construct. 
private ResultOrException(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ResultOrException() { index_ = 0; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ResultOrException( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { bitField0_ |= 0x00000001; index_ = input.readUInt32(); break; } case 18: { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder subBuilder = null; if (((bitField0_ & 0x00000002) == 0x00000002)) { subBuilder = result_.toBuilder(); } result_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(result_); result_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } case 26: { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = null; if (((bitField0_ & 0x00000004) == 0x00000004)) { subBuilder = exception_.toBuilder(); } exception_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(exception_); exception_ = 
subBuilder.buildPartial(); } bitField0_ |= 0x00000004; break; } case 34: { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder subBuilder = null; if (((bitField0_ & 0x00000008) == 0x00000008)) { subBuilder = serviceResult_.toBuilder(); } serviceResult_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(serviceResult_); serviceResult_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000008; break; } case 42: { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.Builder subBuilder = null; if (((bitField0_ & 0x00000010) == 0x00000010)) { subBuilder = loadStats_.toBuilder(); } loadStats_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(loadStats_); loadStats_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000010; break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_ResultOrException_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_ResultOrException_fieldAccessorTable .ensureFieldAccessorsInitialized( 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException.Builder.class); } private int bitField0_; public static final int INDEX_FIELD_NUMBER = 1; private int index_; /** * <pre> * If part of a multi call, save original index of the list of all * passed so can align this response w/ original request. * </pre> * * <code>optional uint32 index = 1;</code> */ public boolean hasIndex() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * If part of a multi call, save original index of the list of all * passed so can align this response w/ original request. * </pre> * * <code>optional uint32 index = 1;</code> */ public int getIndex() { return index_; } public static final int RESULT_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result result_; /** * <code>optional .hbase.pb.Result result = 2;</code> */ public boolean hasResult() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional .hbase.pb.Result result = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result getResult() { return result_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance() : result_; } /** * <code>optional .hbase.pb.Result result = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() { return result_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance() : result_; } public static final int EXCEPTION_FIELD_NUMBER = 3; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair exception_; /** * <code>optional .hbase.pb.NameBytesPair exception = 3;</code> */ public boolean hasException() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional .hbase.pb.NameBytesPair exception = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair getException() { return exception_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance() : exception_; } /** * <code>optional .hbase.pb.NameBytesPair exception = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder() { return exception_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance() : exception_; } public static final int SERVICE_RESULT_FIELD_NUMBER = 4; private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult serviceResult_; /** * <pre> * result if this was a coprocessor service call * </pre> * * <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code> */ public boolean hasServiceResult() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <pre> * result if this was a coprocessor service call * </pre> * * <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult getServiceResult() { return serviceResult_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance() : serviceResult_; } /** * <pre> * result if this was a coprocessor service call * </pre> * * <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder getServiceResultOrBuilder() { return serviceResult_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance() : serviceResult_; } public static final int LOADSTATS_FIELD_NUMBER = 5; private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats loadStats_; /** * <pre> * current load on the region * </pre> * * <code>optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true];</code> */ @java.lang.Deprecated public boolean hasLoadStats() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <pre> * current load on the region * </pre> * * <code>optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true];</code> */ @java.lang.Deprecated public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats getLoadStats() { return loadStats_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance() : loadStats_; } /** * <pre> * current load on the region * </pre> * * <code>optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true];</code> */ @java.lang.Deprecated public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder getLoadStatsOrBuilder() { return loadStats_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance() : loadStats_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (hasException()) { if (!getException().isInitialized()) { memoizedIsInitialized = 0; return false; } } if (hasServiceResult()) { if (!getServiceResult().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt32(1, index_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeMessage(2, getResult()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeMessage(3, getException()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeMessage(4, getServiceResult()); } if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeMessage(5, getLoadStats()); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt32Size(1, index_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(2, getResult()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(3, getException()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(4, getServiceResult()); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size 
+= org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(5, getLoadStats()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException) obj; boolean result = true; result = result && (hasIndex() == other.hasIndex()); if (hasIndex()) { result = result && (getIndex() == other.getIndex()); } result = result && (hasResult() == other.hasResult()); if (hasResult()) { result = result && getResult() .equals(other.getResult()); } result = result && (hasException() == other.hasException()); if (hasException()) { result = result && getException() .equals(other.getException()); } result = result && (hasServiceResult() == other.hasServiceResult()); if (hasServiceResult()) { result = result && getServiceResult() .equals(other.getServiceResult()); } result = result && (hasLoadStats() == other.hasLoadStats()); if (hasLoadStats()) { result = result && getLoadStats() .equals(other.getLoadStats()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasIndex()) { hash = (37 * hash) + INDEX_FIELD_NUMBER; hash = (53 * hash) + getIndex(); } if (hasResult()) { hash = (37 * hash) + RESULT_FIELD_NUMBER; hash = (53 * hash) + getResult().hashCode(); } if (hasException()) { hash = (37 * hash) + EXCEPTION_FIELD_NUMBER; hash = (53 * hash) + getException().hashCode(); } if (hasServiceResult()) 
{ hash = (37 * hash) + SERVICE_RESULT_FIELD_NUMBER; hash = (53 * hash) + getServiceResult().hashCode(); } if (hasLoadStats()) { hash = (37 * hash) + LOADSTATS_FIELD_NUMBER; hash = (53 * hash) + getLoadStats().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException parseFrom( 
java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder 
newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> ** * Either a Result or an Exception NameBytesPair (keyed by * exception name whose value is the exception stringified) * or maybe empty if no result and no exception. * </pre> * * Protobuf type {@code hbase.pb.ResultOrException} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.ResultOrException) org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_ResultOrException_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_ResultOrException_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( 
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getResultFieldBuilder(); getExceptionFieldBuilder(); getServiceResultFieldBuilder(); getLoadStatsFieldBuilder(); } } public Builder clear() { super.clear(); index_ = 0; bitField0_ = (bitField0_ & ~0x00000001); if (resultBuilder_ == null) { result_ = null; } else { resultBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); if (exceptionBuilder_ == null) { exception_ = null; } else { exceptionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000004); if (serviceResultBuilder_ == null) { serviceResult_ = null; } else { serviceResultBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000008); if (loadStatsBuilder_ == null) { loadStats_ = null; } else { loadStatsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000010); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_ResultOrException_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException 
result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.index_ = index_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } if (resultBuilder_ == null) { result.result_ = result_; } else { result.result_ = resultBuilder_.build(); } if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } if (exceptionBuilder_ == null) { result.exception_ = exception_; } else { result.exception_ = exceptionBuilder_.build(); } if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; } if (serviceResultBuilder_ == null) { result.serviceResult_ = serviceResult_; } else { result.serviceResult_ = serviceResultBuilder_.build(); } if (((from_bitField0_ & 0x00000010) == 0x00000010)) { to_bitField0_ |= 0x00000010; } if (loadStatsBuilder_ == null) { result.loadStats_ = loadStats_; } else { result.loadStats_ = loadStatsBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException.getDefaultInstance()) return this; if (other.hasIndex()) { setIndex(other.getIndex()); } if (other.hasResult()) { mergeResult(other.getResult()); } if (other.hasException()) { mergeException(other.getException()); } if (other.hasServiceResult()) { mergeServiceResult(other.getServiceResult()); } if (other.hasLoadStats()) { mergeLoadStats(other.getLoadStats()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { if (hasException()) { if (!getException().isInitialized()) { return false; } } if (hasServiceResult()) { if (!getServiceResult().isInitialized()) { return false; } } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException) e.getUnfinishedMessage(); 
throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private int index_ ; /** * <pre> * If part of a multi call, save original index of the list of all * passed so can align this response w/ original request. * </pre> * * <code>optional uint32 index = 1;</code> */ public boolean hasIndex() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * If part of a multi call, save original index of the list of all * passed so can align this response w/ original request. * </pre> * * <code>optional uint32 index = 1;</code> */ public int getIndex() { return index_; } /** * <pre> * If part of a multi call, save original index of the list of all * passed so can align this response w/ original request. * </pre> * * <code>optional uint32 index = 1;</code> */ public Builder setIndex(int value) { bitField0_ |= 0x00000001; index_ = value; onChanged(); return this; } /** * <pre> * If part of a multi call, save original index of the list of all * passed so can align this response w/ original request. 
* </pre> * * <code>optional uint32 index = 1;</code> */ public Builder clearIndex() { bitField0_ = (bitField0_ & ~0x00000001); index_ = 0; onChanged(); return this; } private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result result_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder> resultBuilder_; /** * <code>optional .hbase.pb.Result result = 2;</code> */ public boolean hasResult() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional .hbase.pb.Result result = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result getResult() { if (resultBuilder_ == null) { return result_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance() : result_; } else { return resultBuilder_.getMessage(); } } /** * <code>optional .hbase.pb.Result result = 2;</code> */ public Builder setResult(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result value) { if (resultBuilder_ == null) { if (value == null) { throw new NullPointerException(); } result_ = value; onChanged(); } else { resultBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .hbase.pb.Result result = 2;</code> */ public Builder setResult( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder builderForValue) { if (resultBuilder_ == null) { result_ = builderForValue.build(); onChanged(); } else { resultBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .hbase.pb.Result result = 2;</code> */ public Builder mergeResult(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result value) { if 
(resultBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && result_ != null && result_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance()) { result_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.newBuilder(result_).mergeFrom(value).buildPartial(); } else { result_ = value; } onChanged(); } else { resultBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .hbase.pb.Result result = 2;</code> */ public Builder clearResult() { if (resultBuilder_ == null) { result_ = null; onChanged(); } else { resultBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } /** * <code>optional .hbase.pb.Result result = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder getResultBuilder() { bitField0_ |= 0x00000002; onChanged(); return getResultFieldBuilder().getBuilder(); } /** * <code>optional .hbase.pb.Result result = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() { if (resultBuilder_ != null) { return resultBuilder_.getMessageOrBuilder(); } else { return result_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.getDefaultInstance() : result_; } } /** * <code>optional .hbase.pb.Result result = 2;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder> getResultFieldBuilder() { if (resultBuilder_ == null) { resultBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrBuilder>( getResult(), getParentForChildren(), isClean()); result_ = null; } return resultBuilder_; } private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair exception_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> exceptionBuilder_; /** * <code>optional .hbase.pb.NameBytesPair exception = 3;</code> */ public boolean hasException() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional .hbase.pb.NameBytesPair exception = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair getException() { if (exceptionBuilder_ == null) { return exception_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance() : exception_; } else { return exceptionBuilder_.getMessage(); } } /** * <code>optional .hbase.pb.NameBytesPair exception = 3;</code> */ public Builder setException(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair value) { if (exceptionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } exception_ = value; onChanged(); } else { exceptionBuilder_.setMessage(value); } bitField0_ |= 0x00000004; return this; } /** * <code>optional .hbase.pb.NameBytesPair exception = 3;</code> */ public Builder setException( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { if (exceptionBuilder_ == null) { exception_ = builderForValue.build(); onChanged(); } else { exceptionBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; return this; } /** * <code>optional .hbase.pb.NameBytesPair exception = 3;</code> */ public Builder mergeException(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair value) { if (exceptionBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004) && exception_ != null && exception_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) { exception_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(exception_).mergeFrom(value).buildPartial(); } else { exception_ = value; } onChanged(); } else { exceptionBuilder_.mergeFrom(value); } bitField0_ |= 0x00000004; return this; } /** * <code>optional .hbase.pb.NameBytesPair exception = 3;</code> */ public Builder clearException() { if (exceptionBuilder_ == null) { exception_ = null; onChanged(); } else { exceptionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000004); return this; } /** * <code>optional .hbase.pb.NameBytesPair exception = 3;</code> */ public 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder getExceptionBuilder() { bitField0_ |= 0x00000004; onChanged(); return getExceptionFieldBuilder().getBuilder(); } /** * <code>optional .hbase.pb.NameBytesPair exception = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder() { if (exceptionBuilder_ != null) { return exceptionBuilder_.getMessageOrBuilder(); } else { return exception_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance() : exception_; } } /** * <code>optional .hbase.pb.NameBytesPair exception = 3;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getExceptionFieldBuilder() { if (exceptionBuilder_ == null) { exceptionBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( getException(), getParentForChildren(), isClean()); exception_ = null; } return exceptionBuilder_; } private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult serviceResult_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder, 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder> serviceResultBuilder_; /** * <pre> * result if this was a coprocessor service call * </pre> * * <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code> */ public boolean hasServiceResult() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <pre> * result if this was a coprocessor service call * </pre> * * <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult getServiceResult() { if (serviceResultBuilder_ == null) { return serviceResult_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance() : serviceResult_; } else { return serviceResultBuilder_.getMessage(); } } /** * <pre> * result if this was a coprocessor service call * </pre> * * <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code> */ public Builder setServiceResult(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult value) { if (serviceResultBuilder_ == null) { if (value == null) { throw new NullPointerException(); } serviceResult_ = value; onChanged(); } else { serviceResultBuilder_.setMessage(value); } bitField0_ |= 0x00000008; return this; } /** * <pre> * result if this was a coprocessor service call * </pre> * * <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code> */ public Builder setServiceResult( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder builderForValue) { if (serviceResultBuilder_ == null) { serviceResult_ = builderForValue.build(); onChanged(); } else { serviceResultBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000008; return this; } /** * <pre> * result if this was a coprocessor service call * </pre> * * <code>optional 
.hbase.pb.CoprocessorServiceResult service_result = 4;</code> */ public Builder mergeServiceResult(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult value) { if (serviceResultBuilder_ == null) { if (((bitField0_ & 0x00000008) == 0x00000008) && serviceResult_ != null && serviceResult_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance()) { serviceResult_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult.newBuilder(serviceResult_).mergeFrom(value).buildPartial(); } else { serviceResult_ = value; } onChanged(); } else { serviceResultBuilder_.mergeFrom(value); } bitField0_ |= 0x00000008; return this; } /** * <pre> * result if this was a coprocessor service call * </pre> * * <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code> */ public Builder clearServiceResult() { if (serviceResultBuilder_ == null) { serviceResult_ = null; onChanged(); } else { serviceResultBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000008); return this; } /** * <pre> * result if this was a coprocessor service call * </pre> * * <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder getServiceResultBuilder() { bitField0_ |= 0x00000008; onChanged(); return getServiceResultFieldBuilder().getBuilder(); } /** * <pre> * result if this was a coprocessor service call * </pre> * * <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder getServiceResultOrBuilder() { if (serviceResultBuilder_ != null) { return serviceResultBuilder_.getMessageOrBuilder(); } else { return serviceResult_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance() : serviceResult_; } } /** * <pre> * result if this was a coprocessor service call * </pre> * * <code>optional .hbase.pb.CoprocessorServiceResult service_result = 4;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder> getServiceResultFieldBuilder() { if (serviceResultBuilder_ == null) { serviceResultBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder>( getServiceResult(), getParentForChildren(), isClean()); serviceResult_ = null; } return serviceResultBuilder_; } private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats loadStats_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder> loadStatsBuilder_; /** * <pre> * current load on the region * </pre> * * <code>optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true];</code> */ @java.lang.Deprecated public boolean hasLoadStats() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <pre> * current load on the region * </pre> * * <code>optional 
.hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true];</code> */ @java.lang.Deprecated public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats getLoadStats() { if (loadStatsBuilder_ == null) { return loadStats_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance() : loadStats_; } else { return loadStatsBuilder_.getMessage(); } } /** * <pre> * current load on the region * </pre> * * <code>optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true];</code> */ @java.lang.Deprecated public Builder setLoadStats(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats value) { if (loadStatsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } loadStats_ = value; onChanged(); } else { loadStatsBuilder_.setMessage(value); } bitField0_ |= 0x00000010; return this; } /** * <pre> * current load on the region * </pre> * * <code>optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true];</code> */ @java.lang.Deprecated public Builder setLoadStats( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.Builder builderForValue) { if (loadStatsBuilder_ == null) { loadStats_ = builderForValue.build(); onChanged(); } else { loadStatsBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000010; return this; } /** * <pre> * current load on the region * </pre> * * <code>optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true];</code> */ @java.lang.Deprecated public Builder mergeLoadStats(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats value) { if (loadStatsBuilder_ == null) { if (((bitField0_ & 0x00000010) == 0x00000010) && loadStats_ != null && loadStats_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance()) { loadStats_ = 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.newBuilder(loadStats_).mergeFrom(value).buildPartial(); } else { loadStats_ = value; } onChanged(); } else { loadStatsBuilder_.mergeFrom(value); } bitField0_ |= 0x00000010; return this; } /** * <pre> * current load on the region * </pre> * * <code>optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true];</code> */ @java.lang.Deprecated public Builder clearLoadStats() { if (loadStatsBuilder_ == null) { loadStats_ = null; onChanged(); } else { loadStatsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000010); return this; } /** * <pre> * current load on the region * </pre> * * <code>optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true];</code> */ @java.lang.Deprecated public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.Builder getLoadStatsBuilder() { bitField0_ |= 0x00000010; onChanged(); return getLoadStatsFieldBuilder().getBuilder(); } /** * <pre> * current load on the region * </pre> * * <code>optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true];</code> */ @java.lang.Deprecated public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder getLoadStatsOrBuilder() { if (loadStatsBuilder_ != null) { return loadStatsBuilder_.getMessageOrBuilder(); } else { return loadStats_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance() : loadStats_; } } /** * <pre> * current load on the region * </pre> * * <code>optional .hbase.pb.RegionLoadStats loadStats = 5 [deprecated = true];</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder> getLoadStatsFieldBuilder() { if (loadStatsBuilder_ == null) { loadStatsBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder>( getLoadStats(), getParentForChildren(), isClean()); loadStats_ = null; } return loadStatsBuilder_; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.ResultOrException) } // @@protoc_insertion_point(class_scope:hbase.pb.ResultOrException) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException getDefaultInstance() { return DEFAULT_INSTANCE; } 
  // NOTE(review): this file is generated by protoc ("DO NOT EDIT"); any change
  // made here by hand will be lost on regeneration. Behavioral changes belong
  // in Client.proto, followed by re-running the protobuf compiler.

  /**
   * Singleton parser for {@code ResultOrException}.
   * Deprecated in generated code: callers should obtain the parser via
   * {@link #parser()} rather than referencing this field directly.
   */
  @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ResultOrException>
      PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<ResultOrException>() {
    public ResultOrException parsePartialFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
        // Delegates to the message's parsing constructor.
        return new ResultOrException(input, extensionRegistry);
    }
  };

  /** Preferred accessor for this message type's parser. */
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ResultOrException> parser() {
    return PARSER;
  }

  @java.lang.Override
  public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ResultOrException> getParserForType() {
    return PARSER;
  }

  public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

  // end of generated message class ResultOrException
  }

  /**
   * Read-only accessor contract for {@code hbase.pb.RegionActionResult},
   * implemented by both the immutable message and its Builder.
   */
  public interface RegionActionResultOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hbase.pb.RegionActionResult)
      org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {

    /**
     * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
     */
    java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException>
        getResultOrExceptionList();
    /**
     * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
     */
    org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException getResultOrException(int index);
    /**
     * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
     */
    int getResultOrExceptionCount();
    /**
     * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder>
        getResultOrExceptionOrBuilderList();
    /**
     * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code>
     */
    org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder getResultOrExceptionOrBuilder(
        int index);

    /**
     * <pre>
     * If the operation failed globally for this region, this exception is set
     * </pre>
     *
     * <code>optional .hbase.pb.NameBytesPair exception = 2;</code>
     */
    boolean hasException();
    /**
     * <pre>
     * If the operation failed globally for this region, this exception is set
     * </pre>
     *
     * <code>optional .hbase.pb.NameBytesPair exception = 2;</code>
     */
    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair getException();
    /**
     * <pre>
     * If the operation failed globally for this region, this exception is set
     * </pre>
     *
     * <code>optional .hbase.pb.NameBytesPair exception = 2;</code>
     */
    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder();
  }
  /**
   * <pre>
   **
   * The result of a RegionAction.
   * </pre>
   *
   * Protobuf type {@code hbase.pb.RegionActionResult}
   */
  public static final class RegionActionResult extends
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hbase.pb.RegionActionResult)
      RegionActionResultOrBuilder {
    // Use RegionActionResult.newBuilder() to construct.
private RegionActionResult(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private RegionActionResult() { resultOrException_ = java.util.Collections.emptyList(); } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private RegionActionResult( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { resultOrException_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException>(); mutable_bitField0_ |= 0x00000001; } resultOrException_.add( input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException.PARSER, extensionRegistry)); break; } case 18: { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = exception_.toBuilder(); } exception_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(exception_); exception_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } } } } catch 
(org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { resultOrException_ = java.util.Collections.unmodifiableList(resultOrException_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionActionResult_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionActionResult_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult.Builder.class); } private int bitField0_; public static final int RESULTOREXCEPTION_FIELD_NUMBER = 1; private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException> resultOrException_; /** * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException> getResultOrExceptionList() { return resultOrException_; } /** * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder> getResultOrExceptionOrBuilderList() { return resultOrException_; } /** * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code> */ public int getResultOrExceptionCount() { return resultOrException_.size(); } /** * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException getResultOrException(int index) { return resultOrException_.get(index); } /** * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder getResultOrExceptionOrBuilder( int index) { return resultOrException_.get(index); } public static final int EXCEPTION_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair exception_; /** * <pre> * If the operation failed globally for this region, this exception is set * </pre> * * <code>optional .hbase.pb.NameBytesPair exception = 2;</code> */ public boolean hasException() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * If the operation failed globally for this region, this exception is set * </pre> * * <code>optional .hbase.pb.NameBytesPair exception = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair getException() { return exception_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance() : exception_; } /** * <pre> * If the operation failed globally for this region, this exception is set * </pre> * * <code>optional .hbase.pb.NameBytesPair exception = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder() { return exception_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance() : exception_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getResultOrExceptionCount(); i++) { if (!getResultOrException(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } if (hasException()) { if (!getException().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < resultOrException_.size(); i++) { output.writeMessage(1, resultOrException_.get(i)); } if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(2, getException()); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < resultOrException_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(1, resultOrException_.get(i)); } if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(2, getException()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult) obj; boolean result = true; result = 
result && getResultOrExceptionList() .equals(other.getResultOrExceptionList()); result = result && (hasException() == other.hasException()); if (hasException()) { result = result && getException() .equals(other.getException()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getResultOrExceptionCount() > 0) { hash = (37 * hash) + RESULTOREXCEPTION_FIELD_NUMBER; hash = (53 * hash) + getResultOrExceptionList().hashCode(); } if (hasException()) { hash = (37 * hash) + EXCEPTION_FIELD_NUMBER; hash = (53 * hash) + getException().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) 
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult parseFrom( 
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> ** * The result of a RegionAction. 
* </pre> * * Protobuf type {@code hbase.pb.RegionActionResult} */
// NOTE(review): protoc-generated code (source: Client.proto) — do not hand-edit; regenerate instead.
public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hbase.pb.RegionActionResult)
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResultOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionActionResult_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionActionResult_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult.Builder.class); }
// Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult.newBuilder()
private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getResultOrExceptionFieldBuilder(); getExceptionFieldBuilder(); } }
// clear(): resets field 1 (resultOrException) and field 2 (exception). In this builder,
// bitField0_ bit 0x1 marks the repeated list as locally-owned/mutable and bit 0x2 marks 'exception' as set.
public Builder clear() { super.clear(); if (resultOrExceptionBuilder_ == null) { resultOrException_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { resultOrExceptionBuilder_.clear(); } if (exceptionBuilder_ == null) { exception_ = null; } else { exceptionBuilder_.clear(); } bitField0_ = (bitField0_ &
~0x00000002); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_RegionActionResult_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; }
// buildPartial(): freezes the repeated list (wrapped unmodifiable once, bit 0x1 cleared) and
// remaps builder bit 0x2 ('exception' set) onto message bit 0x1 — builder and message bit layouts differ.
public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (resultOrExceptionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001)) { resultOrException_ = java.util.Collections.unmodifiableList(resultOrException_); bitField0_ = (bitField0_ & ~0x00000001); } result.resultOrException_ = resultOrException_; } else { result.resultOrException_ = resultOrExceptionBuilder_.build(); } if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000001; } if (exceptionBuilder_ == null) { result.exception_ = exception_; } else { result.exception_ = exceptionBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public
Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult)other); } else { super.mergeFrom(other); return this; } }
// Typed merge: appends other's repeated entries via the plain-list path or the field-builder path,
// aliasing other's (immutable) list when ours is empty; merges 'exception' only when present in other.
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult.getDefaultInstance()) return this; if (resultOrExceptionBuilder_ == null) { if (!other.resultOrException_.isEmpty()) { if (resultOrException_.isEmpty()) { resultOrException_ = other.resultOrException_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureResultOrExceptionIsMutable(); resultOrException_.addAll(other.resultOrException_); } onChanged(); } } else { if (!other.resultOrException_.isEmpty()) { if (resultOrExceptionBuilder_.isEmpty()) { resultOrExceptionBuilder_.dispose(); resultOrExceptionBuilder_ = null; resultOrException_ = other.resultOrException_; bitField0_ = (bitField0_ & ~0x00000001); resultOrExceptionBuilder_ =
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getResultOrExceptionFieldBuilder() : null; } else { resultOrExceptionBuilder_.addAllMessages(other.resultOrException_); } } } if (other.hasException()) { mergeException(other.getException()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getResultOrExceptionCount(); i++) { if (!getResultOrException(i).isInitialized()) { return false; } } if (hasException()) { if (!getException().isInitialized()) { return false; } } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException> resultOrException_ = java.util.Collections.emptyList(); private void ensureResultOrExceptionIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { resultOrException_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException>(resultOrException_); bitField0_ |= 0x00000001; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException,
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder> resultOrExceptionBuilder_; /** * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException> getResultOrExceptionList() { if (resultOrExceptionBuilder_ == null) { return java.util.Collections.unmodifiableList(resultOrException_); } else { return resultOrExceptionBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code> */ public int getResultOrExceptionCount() { if (resultOrExceptionBuilder_ == null) { return resultOrException_.size(); } else { return resultOrExceptionBuilder_.getCount(); } } /** * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException getResultOrException(int index) { if (resultOrExceptionBuilder_ == null) { return resultOrException_.get(index); } else { return resultOrExceptionBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code> */ public Builder setResultOrException( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException value) { if (resultOrExceptionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureResultOrExceptionIsMutable(); resultOrException_.set(index, value); onChanged(); } else { resultOrExceptionBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code> */ public Builder setResultOrException( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException.Builder builderForValue) { if (resultOrExceptionBuilder_ == null) { ensureResultOrExceptionIsMutable();
resultOrException_.set(index, builderForValue.build()); onChanged(); } else { resultOrExceptionBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code> */ public Builder addResultOrException(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException value) { if (resultOrExceptionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureResultOrExceptionIsMutable(); resultOrException_.add(value); onChanged(); } else { resultOrExceptionBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code> */ public Builder addResultOrException( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException value) { if (resultOrExceptionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureResultOrExceptionIsMutable(); resultOrException_.add(index, value); onChanged(); } else { resultOrExceptionBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code> */ public Builder addResultOrException( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException.Builder builderForValue) { if (resultOrExceptionBuilder_ == null) { ensureResultOrExceptionIsMutable(); resultOrException_.add(builderForValue.build()); onChanged(); } else { resultOrExceptionBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code> */ public Builder addResultOrException( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException.Builder builderForValue) { if (resultOrExceptionBuilder_ == null) { ensureResultOrExceptionIsMutable(); resultOrException_.add(index, builderForValue.build()); onChanged(); } else { resultOrExceptionBuilder_.addMessage(index,
builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code> */ public Builder addAllResultOrException( java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException> values) { if (resultOrExceptionBuilder_ == null) { ensureResultOrExceptionIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, resultOrException_); onChanged(); } else { resultOrExceptionBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code> */ public Builder clearResultOrException() { if (resultOrExceptionBuilder_ == null) { resultOrException_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { resultOrExceptionBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code> */ public Builder removeResultOrException(int index) { if (resultOrExceptionBuilder_ == null) { ensureResultOrExceptionIsMutable(); resultOrException_.remove(index); onChanged(); } else { resultOrExceptionBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException.Builder getResultOrExceptionBuilder( int index) { return getResultOrExceptionFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder getResultOrExceptionOrBuilder( int index) { if (resultOrExceptionBuilder_ == null) { return resultOrException_.get(index); } else { return resultOrExceptionBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code> */ public java.util.List<?
extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder> getResultOrExceptionOrBuilderList() { if (resultOrExceptionBuilder_ != null) { return resultOrExceptionBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(resultOrException_); } } /** * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException.Builder addResultOrExceptionBuilder() { return getResultOrExceptionFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException.getDefaultInstance()); } /** * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException.Builder addResultOrExceptionBuilder( int index) { return getResultOrExceptionFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException.getDefaultInstance()); } /** * <code>repeated .hbase.pb.ResultOrException resultOrException = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException.Builder> getResultOrExceptionBuilderList() { return getResultOrExceptionFieldBuilder().getBuilderList(); }
// Lazily creates the repeated-field builder; once created, ownership of resultOrException_ moves into it and the local reference is nulled.
private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder> getResultOrExceptionFieldBuilder() { if (resultOrExceptionBuilder_ == null) { resultOrExceptionBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException,
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder>( resultOrException_, ((bitField0_ & 0x00000001) == 0x00000001), getParentForChildren(), isClean()); resultOrException_ = null; } return resultOrExceptionBuilder_; } private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair exception_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> exceptionBuilder_; /** * <pre> * If the operation failed globally for this region, this exception is set * </pre> * * <code>optional .hbase.pb.NameBytesPair exception = 2;</code> */ public boolean hasException() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <pre> * If the operation failed globally for this region, this exception is set * </pre> * * <code>optional .hbase.pb.NameBytesPair exception = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair getException() { if (exceptionBuilder_ == null) { return exception_ == null ?
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance() : exception_; } else { return exceptionBuilder_.getMessage(); } } /** * <pre> * If the operation failed globally for this region, this exception is set * </pre> * * <code>optional .hbase.pb.NameBytesPair exception = 2;</code> */ public Builder setException(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair value) { if (exceptionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } exception_ = value; onChanged(); } else { exceptionBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } /** * <pre> * If the operation failed globally for this region, this exception is set * </pre> * * <code>optional .hbase.pb.NameBytesPair exception = 2;</code> */ public Builder setException( org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { if (exceptionBuilder_ == null) { exception_ = builderForValue.build(); onChanged(); } else { exceptionBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } /** * <pre> * If the operation failed globally for this region, this exception is set * </pre> * * <code>optional .hbase.pb.NameBytesPair exception = 2;</code> */ public Builder mergeException(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair value) { if (exceptionBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && exception_ != null && exception_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) { exception_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(exception_).mergeFrom(value).buildPartial(); } else { exception_ = value; } onChanged(); } else { exceptionBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } /** * <pre> * If the operation failed globally for this region, this exception is set *
</pre> * * <code>optional .hbase.pb.NameBytesPair exception = 2;</code> */ public Builder clearException() { if (exceptionBuilder_ == null) { exception_ = null; onChanged(); } else { exceptionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } /** * <pre> * If the operation failed globally for this region, this exception is set * </pre> * * <code>optional .hbase.pb.NameBytesPair exception = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder getExceptionBuilder() { bitField0_ |= 0x00000002; onChanged(); return getExceptionFieldBuilder().getBuilder(); } /** * <pre> * If the operation failed globally for this region, this exception is set * </pre> * * <code>optional .hbase.pb.NameBytesPair exception = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder() { if (exceptionBuilder_ != null) { return exceptionBuilder_.getMessageOrBuilder(); } else { return exception_ == null ?
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance() : exception_; } } /** * <pre> * If the operation failed globally for this region, this exception is set * </pre> * * <code>optional .hbase.pb.NameBytesPair exception = 2;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getExceptionFieldBuilder() { if (exceptionBuilder_ == null) { exceptionBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( getException(), getParentForChildren(), isClean()); exception_ = null; } return exceptionBuilder_; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); }
// @@protoc_insertion_point(builder_scope:hbase.pb.RegionActionResult)
}
// @@protoc_insertion_point(class_scope:hbase.pb.RegionActionResult)
private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult getDefaultInstance() { return DEFAULT_INSTANCE;
}
// Deprecated singleton parser kept public for source compatibility; parser() below returns the same instance.
@java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<RegionActionResult> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<RegionActionResult>() { public RegionActionResult parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new RegionActionResult(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<RegionActionResult> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<RegionActionResult> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
// NOTE(review): protoc-generated read-only accessor view of MultiRequest
// (fields: repeated regionAction = 1, optional uint64 nonceGroup = 2, optional Condition condition = 3).
public interface MultiRequestOrBuilder extends
// @@protoc_insertion_point(interface_extends:hbase.pb.MultiRequest)
org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction> getRegionActionList(); /** * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction getRegionAction(int index); /** * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code> */ int getRegionActionCount(); /** * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code> */ java.util.List<?
extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionOrBuilder> getRegionActionOrBuilderList(); /** * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionOrBuilder getRegionActionOrBuilder( int index); /** * <code>optional uint64 nonceGroup = 2;</code> */ boolean hasNonceGroup(); /** * <code>optional uint64 nonceGroup = 2;</code> */ long getNonceGroup(); /** * <code>optional .hbase.pb.Condition condition = 3;</code> */ boolean hasCondition(); /** * <code>optional .hbase.pb.Condition condition = 3;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition getCondition(); /** * <code>optional .hbase.pb.Condition condition = 3;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder(); } /** * <pre> ** * Execute a list of actions on a given region in order. * Nothing prevents a request to contains a set of RegionAction on the same region. * For this reason, the matching between the MultiRequest and the MultiResponse is not * done by the region specifier but by keeping the order of the RegionActionResult vs. * the order of the RegionAction. * </pre> * * Protobuf type {@code hbase.pb.MultiRequest} */ public static final class MultiRequest extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hbase.pb.MultiRequest)
MultiRequestOrBuilder {
// Use MultiRequest.newBuilder() to construct.
private MultiRequest(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private MultiRequest() { regionAction_ = java.util.Collections.emptyList(); nonceGroup_ = 0L; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private MultiRequest( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { regionAction_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction>(); mutable_bitField0_ |= 0x00000001; } regionAction_.add( input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction.PARSER, extensionRegistry)); break; } case 16: { bitField0_ |= 0x00000001; nonceGroup_ = input.readUInt64(); break; } case 26: { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.Builder subBuilder = null; if (((bitField0_ & 0x00000002) == 0x00000002)) { subBuilder = condition_.toBuilder(); } condition_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(condition_); condition_ = subBuilder.buildPartial(); } bitField0_ |= 
0x00000002; break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { regionAction_ = java.util.Collections.unmodifiableList(regionAction_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiRequest_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest.Builder.class); } private int bitField0_; public static final int REGIONACTION_FIELD_NUMBER = 1; private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction> regionAction_; /** * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction> getRegionActionList() { return regionAction_; } /** * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionOrBuilder> getRegionActionOrBuilderList() { return regionAction_; } /** * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code> */ public int getRegionActionCount() { return regionAction_.size(); } /** * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction getRegionAction(int index) { return regionAction_.get(index); } /** * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionOrBuilder getRegionActionOrBuilder( int index) { return regionAction_.get(index); } public static final int NONCEGROUP_FIELD_NUMBER = 2; private long nonceGroup_; /** * <code>optional uint64 nonceGroup = 2;</code> */ public boolean hasNonceGroup() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional uint64 nonceGroup = 2;</code> */ public long getNonceGroup() { return nonceGroup_; } public static final int CONDITION_FIELD_NUMBER = 3; private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition condition_; /** * <code>optional .hbase.pb.Condition condition = 3;</code> */ public boolean hasCondition() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional .hbase.pb.Condition condition = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition getCondition() { return condition_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.getDefaultInstance() : condition_; } /** * <code>optional .hbase.pb.Condition condition = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder() { return condition_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.getDefaultInstance() : condition_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getRegionActionCount(); i++) { if (!getRegionAction(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } if (hasCondition()) { if (!getCondition().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < regionAction_.size(); i++) { output.writeMessage(1, regionAction_.get(i)); } if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeUInt64(2, nonceGroup_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeMessage(3, getCondition()); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < regionAction_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(1, regionAction_.get(i)); } if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt64Size(2, nonceGroup_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(3, getCondition()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest)) { return super.equals(obj); } 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest) obj; boolean result = true; result = result && getRegionActionList() .equals(other.getRegionActionList()); result = result && (hasNonceGroup() == other.hasNonceGroup()); if (hasNonceGroup()) { result = result && (getNonceGroup() == other.getNonceGroup()); } result = result && (hasCondition() == other.hasCondition()); if (hasCondition()) { result = result && getCondition() .equals(other.getCondition()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getRegionActionCount() > 0) { hash = (37 * hash) + REGIONACTION_FIELD_NUMBER; hash = (53 * hash) + getRegionActionList().hashCode(); } if (hasNonceGroup()) { hash = (37 * hash) + NONCEGROUP_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( getNonceGroup()); } if (hasCondition()) { hash = (37 * hash) + CONDITION_FIELD_NUMBER; hash = (53 * hash) + getCondition().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> ** * Execute a list of actions on a given region in order. * Nothing prevents a request to contains a set of RegionAction on the same region. * For this reason, the matching between the MultiRequest and the MultiResponse is not * done by the region specifier but by keeping the order of the RegionActionResult vs. * the order of the RegionAction. 
* </pre> * * Protobuf type {@code hbase.pb.MultiRequest} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.MultiRequest) org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequestOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiRequest_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getRegionActionFieldBuilder(); getConditionFieldBuilder(); } } public Builder clear() { super.clear(); if (regionActionBuilder_ == null) { regionAction_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { regionActionBuilder_.clear(); } nonceGroup_ = 0L; bitField0_ = (bitField0_ & ~0x00000002); if (conditionBuilder_ == null) { condition_ = null; } else { conditionBuilder_.clear(); } bitField0_ = (bitField0_ & 
~0x00000004); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiRequest_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (regionActionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001)) { regionAction_ = java.util.Collections.unmodifiableList(regionAction_); bitField0_ = (bitField0_ & ~0x00000001); } result.regionAction_ = regionAction_; } else { result.regionAction_ = regionActionBuilder_.build(); } if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000001; } result.nonceGroup_ = nonceGroup_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000002; } if (conditionBuilder_ == null) { result.condition_ = condition_; } else { result.condition_ = conditionBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) 
super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance()) return this; if (regionActionBuilder_ == null) { if (!other.regionAction_.isEmpty()) { if (regionAction_.isEmpty()) { regionAction_ = other.regionAction_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureRegionActionIsMutable(); regionAction_.addAll(other.regionAction_); } onChanged(); } } else { if (!other.regionAction_.isEmpty()) { if (regionActionBuilder_.isEmpty()) { regionActionBuilder_.dispose(); regionActionBuilder_ = null; regionAction_ = other.regionAction_; bitField0_ = (bitField0_ & ~0x00000001); regionActionBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getRegionActionFieldBuilder() : null; } else { regionActionBuilder_.addAllMessages(other.regionAction_); } } } if (other.hasNonceGroup()) { setNonceGroup(other.getNonceGroup()); } if (other.hasCondition()) { mergeCondition(other.getCondition()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getRegionActionCount(); i++) { if (!getRegionAction(i).isInitialized()) { return false; } } if (hasCondition()) { if (!getCondition().isInitialized()) { return false; } } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction> regionAction_ = java.util.Collections.emptyList(); private void ensureRegionActionIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { regionAction_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction>(regionAction_); bitField0_ |= 0x00000001; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction.Builder, 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionOrBuilder> regionActionBuilder_; /** * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction> getRegionActionList() { if (regionActionBuilder_ == null) { return java.util.Collections.unmodifiableList(regionAction_); } else { return regionActionBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code> */ public int getRegionActionCount() { if (regionActionBuilder_ == null) { return regionAction_.size(); } else { return regionActionBuilder_.getCount(); } } /** * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction getRegionAction(int index) { if (regionActionBuilder_ == null) { return regionAction_.get(index); } else { return regionActionBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code> */ public Builder setRegionAction( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction value) { if (regionActionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRegionActionIsMutable(); regionAction_.set(index, value); onChanged(); } else { regionActionBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code> */ public Builder setRegionAction( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction.Builder builderForValue) { if (regionActionBuilder_ == null) { ensureRegionActionIsMutable(); regionAction_.set(index, builderForValue.build()); onChanged(); } else { regionActionBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code> */ public Builder 
addRegionAction(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction value) { if (regionActionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRegionActionIsMutable(); regionAction_.add(value); onChanged(); } else { regionActionBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code> */ public Builder addRegionAction( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction value) { if (regionActionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRegionActionIsMutable(); regionAction_.add(index, value); onChanged(); } else { regionActionBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code> */ public Builder addRegionAction( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction.Builder builderForValue) { if (regionActionBuilder_ == null) { ensureRegionActionIsMutable(); regionAction_.add(builderForValue.build()); onChanged(); } else { regionActionBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code> */ public Builder addRegionAction( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction.Builder builderForValue) { if (regionActionBuilder_ == null) { ensureRegionActionIsMutable(); regionAction_.add(index, builderForValue.build()); onChanged(); } else { regionActionBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code> */ public Builder addAllRegionAction( java.lang.Iterable<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction> values) { if (regionActionBuilder_ == null) { ensureRegionActionIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, regionAction_); onChanged(); } else { regionActionBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code> */ public Builder clearRegionAction() { if (regionActionBuilder_ == null) { regionAction_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { regionActionBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code> */ public Builder removeRegionAction(int index) { if (regionActionBuilder_ == null) { ensureRegionActionIsMutable(); regionAction_.remove(index); onChanged(); } else { regionActionBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction.Builder getRegionActionBuilder( int index) { return getRegionActionFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionOrBuilder getRegionActionOrBuilder( int index) { if (regionActionBuilder_ == null) { return regionAction_.get(index); } else { return regionActionBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionOrBuilder> getRegionActionOrBuilderList() { if (regionActionBuilder_ != null) { return regionActionBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(regionAction_); } } /** * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction.Builder addRegionActionBuilder() { return getRegionActionFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction.getDefaultInstance()); } /** * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction.Builder addRegionActionBuilder( int index) { return getRegionActionFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction.getDefaultInstance()); } /** * <code>repeated .hbase.pb.RegionAction regionAction = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction.Builder> getRegionActionBuilderList() { return getRegionActionFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionOrBuilder> getRegionActionFieldBuilder() { if (regionActionBuilder_ == null) { regionActionBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction.Builder, 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionOrBuilder>( regionAction_, ((bitField0_ & 0x00000001) == 0x00000001), getParentForChildren(), isClean()); regionAction_ = null; } return regionActionBuilder_; } private long nonceGroup_ ; /** * <code>optional uint64 nonceGroup = 2;</code> */ public boolean hasNonceGroup() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional uint64 nonceGroup = 2;</code> */ public long getNonceGroup() { return nonceGroup_; } /** * <code>optional uint64 nonceGroup = 2;</code> */ public Builder setNonceGroup(long value) { bitField0_ |= 0x00000002; nonceGroup_ = value; onChanged(); return this; } /** * <code>optional uint64 nonceGroup = 2;</code> */ public Builder clearNonceGroup() { bitField0_ = (bitField0_ & ~0x00000002); nonceGroup_ = 0L; onChanged(); return this; } private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition condition_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ConditionOrBuilder> conditionBuilder_; /** * <code>optional .hbase.pb.Condition condition = 3;</code> */ public boolean hasCondition() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional .hbase.pb.Condition condition = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition getCondition() { if (conditionBuilder_ == null) { return condition_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.getDefaultInstance() : condition_; } else { return conditionBuilder_.getMessage(); } } /** * <code>optional .hbase.pb.Condition condition = 3;</code> */ public Builder setCondition(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition value) { if (conditionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } condition_ = value; onChanged(); } else { conditionBuilder_.setMessage(value); } bitField0_ |= 0x00000004; return this; } /** * <code>optional .hbase.pb.Condition condition = 3;</code> */ public Builder setCondition( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.Builder builderForValue) { if (conditionBuilder_ == null) { condition_ = builderForValue.build(); onChanged(); } else { conditionBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; return this; } /** * <code>optional .hbase.pb.Condition condition = 3;</code> */ public Builder mergeCondition(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition value) { if (conditionBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004) && condition_ != null && condition_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.getDefaultInstance()) { condition_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.newBuilder(condition_).mergeFrom(value).buildPartial(); } else { condition_ = value; } onChanged(); } else { conditionBuilder_.mergeFrom(value); } bitField0_ |= 0x00000004; return this; } /** * <code>optional .hbase.pb.Condition condition = 3;</code> */ public Builder clearCondition() { if (conditionBuilder_ == null) { condition_ = null; onChanged(); } else { conditionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000004); return this; } /** * <code>optional .hbase.pb.Condition condition = 3;</code> */ public 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.Builder getConditionBuilder() { bitField0_ |= 0x00000004; onChanged(); return getConditionFieldBuilder().getBuilder(); } /** * <code>optional .hbase.pb.Condition condition = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder() { if (conditionBuilder_ != null) { return conditionBuilder_.getMessageOrBuilder(); } else { return condition_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.getDefaultInstance() : condition_; } } /** * <code>optional .hbase.pb.Condition condition = 3;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ConditionOrBuilder> getConditionFieldBuilder() { if (conditionBuilder_ == null) { conditionBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ConditionOrBuilder>( getCondition(), getParentForChildren(), isClean()); condition_ = null; } return conditionBuilder_; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.MultiRequest) } // @@protoc_insertion_point(class_scope:hbase.pb.MultiRequest) private static final 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<MultiRequest> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<MultiRequest>() { public MultiRequest parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new MultiRequest(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<MultiRequest> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<MultiRequest> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface MultiResponseOrBuilder extends // @@protoc_insertion_point(interface_extends:hbase.pb.MultiResponse) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code> */ java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult> getRegionActionResultList(); /** * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult getRegionActionResult(int index); /** * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code> */ int 
getRegionActionResultCount(); /** * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code> */ java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResultOrBuilder> getRegionActionResultOrBuilderList(); /** * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResultOrBuilder getRegionActionResultOrBuilder( int index); /** * <pre> * used for mutate to indicate processed only * </pre> * * <code>optional bool processed = 2;</code> */ boolean hasProcessed(); /** * <pre> * used for mutate to indicate processed only * </pre> * * <code>optional bool processed = 2;</code> */ boolean getProcessed(); /** * <code>optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3;</code> */ boolean hasRegionStatistics(); /** * <code>optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats getRegionStatistics(); /** * <code>optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3;</code> */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStatsOrBuilder getRegionStatisticsOrBuilder(); } /** * Protobuf type {@code hbase.pb.MultiResponse} */ public static final class MultiResponse extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:hbase.pb.MultiResponse) MultiResponseOrBuilder { // Use MultiResponse.newBuilder() to construct. 
private MultiResponse(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private MultiResponse() { regionActionResult_ = java.util.Collections.emptyList(); processed_ = false; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private MultiResponse( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { regionActionResult_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult>(); mutable_bitField0_ |= 0x00000001; } regionActionResult_.add( input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult.PARSER, extensionRegistry)); break; } case 16: { bitField0_ |= 0x00000001; processed_ = input.readBool(); break; } case 26: { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats.Builder subBuilder = null; if (((bitField0_ & 0x00000002) == 0x00000002)) { subBuilder = regionStatistics_.toBuilder(); } regionStatistics_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats.PARSER, extensionRegistry); if (subBuilder != null) { 
subBuilder.mergeFrom(regionStatistics_); regionStatistics_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { regionActionResult_ = java.util.Collections.unmodifiableList(regionActionResult_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiResponse_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse.Builder.class); } private int bitField0_; public static final int REGIONACTIONRESULT_FIELD_NUMBER = 1; private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult> regionActionResult_; /** * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult> getRegionActionResultList() { return regionActionResult_; } /** * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResultOrBuilder> getRegionActionResultOrBuilderList() { return regionActionResult_; } /** * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code> */ public int getRegionActionResultCount() { return regionActionResult_.size(); } /** * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult getRegionActionResult(int index) { return regionActionResult_.get(index); } /** * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResultOrBuilder getRegionActionResultOrBuilder( int index) { return regionActionResult_.get(index); } public static final int PROCESSED_FIELD_NUMBER = 2; private boolean processed_; /** * <pre> * used for mutate to indicate processed only * </pre> * * <code>optional bool processed = 2;</code> */ public boolean hasProcessed() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> * used for mutate to indicate processed only * </pre> * * <code>optional bool processed = 2;</code> */ public boolean getProcessed() { return processed_; } public static final int REGIONSTATISTICS_FIELD_NUMBER = 3; private org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats regionStatistics_; /** * <code>optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3;</code> */ public boolean hasRegionStatistics() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats getRegionStatistics() { return regionStatistics_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats.getDefaultInstance() : regionStatistics_; } /** * <code>optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStatsOrBuilder getRegionStatisticsOrBuilder() { return regionStatistics_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats.getDefaultInstance() : regionStatistics_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; for (int i = 0; i < getRegionActionResultCount(); i++) { if (!getRegionActionResult(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } if (hasRegionStatistics()) { if (!getRegionStatistics().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < regionActionResult_.size(); i++) { output.writeMessage(1, regionActionResult_.get(i)); } if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(2, processed_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeMessage(3, getRegionStatistics()); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < regionActionResult_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(1, regionActionResult_.get(i)); } if (((bitField0_ & 0x00000001) == 0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(2, processed_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += 
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(3, getRegionStatistics()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse) obj; boolean result = true; result = result && getRegionActionResultList() .equals(other.getRegionActionResultList()); result = result && (hasProcessed() == other.hasProcessed()); if (hasProcessed()) { result = result && (getProcessed() == other.getProcessed()); } result = result && (hasRegionStatistics() == other.hasRegionStatistics()); if (hasRegionStatistics()) { result = result && getRegionStatistics() .equals(other.getRegionStatistics()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getRegionActionResultCount() > 0) { hash = (37 * hash) + REGIONACTIONRESULT_FIELD_NUMBER; hash = (53 * hash) + getRegionActionResultList().hashCode(); } if (hasProcessed()) { hash = (37 * hash) + PROCESSED_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getProcessed()); } if (hasRegionStatistics()) { hash = (37 * hash) + REGIONSTATISTICS_FIELD_NUMBER; hash = (53 * hash) + getRegionStatistics().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse 
parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse parseDelimitedFrom(java.io.InputStream input) throws 
java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.MultiResponse} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hbase.pb.MultiResponse) org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponseOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiResponse_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getRegionActionResultFieldBuilder(); getRegionStatisticsFieldBuilder(); } } public Builder clear() { super.clear(); if (regionActionResultBuilder_ == null) { regionActionResult_ = 
java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { regionActionResultBuilder_.clear(); } processed_ = false; bitField0_ = (bitField0_ & ~0x00000002); if (regionStatisticsBuilder_ == null) { regionStatistics_ = null; } else { regionStatisticsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000004); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_MultiResponse_descriptor; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse build() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (regionActionResultBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001)) { regionActionResult_ = java.util.Collections.unmodifiableList(regionActionResult_); bitField0_ = (bitField0_ & ~0x00000001); } result.regionActionResult_ = regionActionResult_; } else { result.regionActionResult_ = regionActionResultBuilder_.build(); } if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000001; } result.processed_ = processed_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000002; } if 
(regionStatisticsBuilder_ == null) { result.regionStatistics_ = regionStatistics_; } else { result.regionStatistics_ = regionStatisticsBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance()) return this; if (regionActionResultBuilder_ == null) { if (!other.regionActionResult_.isEmpty()) { if (regionActionResult_.isEmpty()) { regionActionResult_ = other.regionActionResult_; bitField0_ = (bitField0_ & ~0x00000001); } else { 
ensureRegionActionResultIsMutable(); regionActionResult_.addAll(other.regionActionResult_); } onChanged(); } } else { if (!other.regionActionResult_.isEmpty()) { if (regionActionResultBuilder_.isEmpty()) { regionActionResultBuilder_.dispose(); regionActionResultBuilder_ = null; regionActionResult_ = other.regionActionResult_; bitField0_ = (bitField0_ & ~0x00000001); regionActionResultBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getRegionActionResultFieldBuilder() : null; } else { regionActionResultBuilder_.addAllMessages(other.regionActionResult_); } } } if (other.hasProcessed()) { setProcessed(other.getProcessed()); } if (other.hasRegionStatistics()) { mergeRegionStatistics(other.getRegionStatistics()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { for (int i = 0; i < getRegionActionResultCount(); i++) { if (!getRegionActionResult(i).isInitialized()) { return false; } } if (hasRegionStatistics()) { if (!getRegionStatistics().isInitialized()) { return false; } } return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult> regionActionResult_ = 
java.util.Collections.emptyList(); private void ensureRegionActionResultIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { regionActionResult_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult>(regionActionResult_); bitField0_ |= 0x00000001; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResultOrBuilder> regionActionResultBuilder_; /** * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult> getRegionActionResultList() { if (regionActionResultBuilder_ == null) { return java.util.Collections.unmodifiableList(regionActionResult_); } else { return regionActionResultBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code> */ public int getRegionActionResultCount() { if (regionActionResultBuilder_ == null) { return regionActionResult_.size(); } else { return regionActionResultBuilder_.getCount(); } } /** * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult getRegionActionResult(int index) { if (regionActionResultBuilder_ == null) { return regionActionResult_.get(index); } else { return regionActionResultBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code> */ public Builder setRegionActionResult( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult value) { if (regionActionResultBuilder_ == null) { if (value == null) { throw 
new NullPointerException(); } ensureRegionActionResultIsMutable(); regionActionResult_.set(index, value); onChanged(); } else { regionActionResultBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code> */ public Builder setRegionActionResult( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult.Builder builderForValue) { if (regionActionResultBuilder_ == null) { ensureRegionActionResultIsMutable(); regionActionResult_.set(index, builderForValue.build()); onChanged(); } else { regionActionResultBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code> */ public Builder addRegionActionResult(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult value) { if (regionActionResultBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRegionActionResultIsMutable(); regionActionResult_.add(value); onChanged(); } else { regionActionResultBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code> */ public Builder addRegionActionResult( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult value) { if (regionActionResultBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRegionActionResultIsMutable(); regionActionResult_.add(index, value); onChanged(); } else { regionActionResultBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code> */ public Builder addRegionActionResult( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult.Builder builderForValue) { if (regionActionResultBuilder_ == null) { ensureRegionActionResultIsMutable(); 
regionActionResult_.add(builderForValue.build()); onChanged(); } else { regionActionResultBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code> */ public Builder addRegionActionResult( int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult.Builder builderForValue) { if (regionActionResultBuilder_ == null) { ensureRegionActionResultIsMutable(); regionActionResult_.add(index, builderForValue.build()); onChanged(); } else { regionActionResultBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code> */ public Builder addAllRegionActionResult( java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult> values) { if (regionActionResultBuilder_ == null) { ensureRegionActionResultIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( values, regionActionResult_); onChanged(); } else { regionActionResultBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code> */ public Builder clearRegionActionResult() { if (regionActionResultBuilder_ == null) { regionActionResult_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { regionActionResultBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code> */ public Builder removeRegionActionResult(int index) { if (regionActionResultBuilder_ == null) { ensureRegionActionResultIsMutable(); regionActionResult_.remove(index); onChanged(); } else { regionActionResultBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code> */ public 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult.Builder getRegionActionResultBuilder( int index) { return getRegionActionResultFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResultOrBuilder getRegionActionResultOrBuilder( int index) { if (regionActionResultBuilder_ == null) { return regionActionResult_.get(index); } else { return regionActionResultBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResultOrBuilder> getRegionActionResultOrBuilderList() { if (regionActionResultBuilder_ != null) { return regionActionResultBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(regionActionResult_); } } /** * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult.Builder addRegionActionResultBuilder() { return getRegionActionResultFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult.getDefaultInstance()); } /** * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult.Builder addRegionActionResultBuilder( int index) { return getRegionActionResultFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult.getDefaultInstance()); } /** * <code>repeated .hbase.pb.RegionActionResult regionActionResult = 1;</code> */ public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult.Builder> 
getRegionActionResultBuilderList() { return getRegionActionResultFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResultOrBuilder> getRegionActionResultFieldBuilder() { if (regionActionResultBuilder_ == null) { regionActionResultBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResultOrBuilder>( regionActionResult_, ((bitField0_ & 0x00000001) == 0x00000001), getParentForChildren(), isClean()); regionActionResult_ = null; } return regionActionResultBuilder_; } private boolean processed_ ; /** * <pre> * used for mutate to indicate processed only * </pre> * * <code>optional bool processed = 2;</code> */ public boolean hasProcessed() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <pre> * used for mutate to indicate processed only * </pre> * * <code>optional bool processed = 2;</code> */ public boolean getProcessed() { return processed_; } /** * <pre> * used for mutate to indicate processed only * </pre> * * <code>optional bool processed = 2;</code> */ public Builder setProcessed(boolean value) { bitField0_ |= 0x00000002; processed_ = value; onChanged(); return this; } /** * <pre> * used for mutate to indicate processed only * </pre> * * <code>optional bool processed = 2;</code> */ public Builder clearProcessed() { bitField0_ = (bitField0_ & ~0x00000002); processed_ = false; onChanged(); return this; } private 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats regionStatistics_ = null; private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStatsOrBuilder> regionStatisticsBuilder_; /** * <code>optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3;</code> */ public boolean hasRegionStatistics() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats getRegionStatistics() { if (regionStatisticsBuilder_ == null) { return regionStatistics_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats.getDefaultInstance() : regionStatistics_; } else { return regionStatisticsBuilder_.getMessage(); } } /** * <code>optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3;</code> */ public Builder setRegionStatistics(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats value) { if (regionStatisticsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } regionStatistics_ = value; onChanged(); } else { regionStatisticsBuilder_.setMessage(value); } bitField0_ |= 0x00000004; return this; } /** * <code>optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3;</code> */ public Builder setRegionStatistics( org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats.Builder builderForValue) { if (regionStatisticsBuilder_ == null) { regionStatistics_ = builderForValue.build(); onChanged(); } else { regionStatisticsBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; 
return this; } /** * <code>optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3;</code> */ public Builder mergeRegionStatistics(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats value) { if (regionStatisticsBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004) && regionStatistics_ != null && regionStatistics_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats.getDefaultInstance()) { regionStatistics_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats.newBuilder(regionStatistics_).mergeFrom(value).buildPartial(); } else { regionStatistics_ = value; } onChanged(); } else { regionStatisticsBuilder_.mergeFrom(value); } bitField0_ |= 0x00000004; return this; } /** * <code>optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3;</code> */ public Builder clearRegionStatistics() { if (regionStatisticsBuilder_ == null) { regionStatistics_ = null; onChanged(); } else { regionStatisticsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000004); return this; } /** * <code>optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats.Builder getRegionStatisticsBuilder() { bitField0_ |= 0x00000004; onChanged(); return getRegionStatisticsFieldBuilder().getBuilder(); } /** * <code>optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStatsOrBuilder getRegionStatisticsOrBuilder() { if (regionStatisticsBuilder_ != null) { return regionStatisticsBuilder_.getMessageOrBuilder(); } else { return regionStatistics_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats.getDefaultInstance() : regionStatistics_; } } /** * <code>optional .hbase.pb.MultiRegionLoadStats regionStatistics = 3;</code> */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStatsOrBuilder> getRegionStatisticsFieldBuilder() { if (regionStatisticsBuilder_ == null) { regionStatisticsBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStatsOrBuilder>( getRegionStatistics(), getParentForChildren(), isClean()); regionStatistics_ = null; } return regionStatisticsBuilder_; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:hbase.pb.MultiResponse) } // @@protoc_insertion_point(class_scope:hbase.pb.MultiResponse) private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse(); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse getDefaultInstance() { return 
// ---------------------------------------------------------------------------
// Generated by protoc from Client.proto — DO NOT hand-edit; change the .proto
// and regenerate instead (comments here are review annotations only).
// This span contains:
//   * the tail of MultiResponse: the deprecated static PARSER (retained for
//     backward compatibility), the parser()/getParserForType() accessors, and
//     getDefaultInstanceForType();
//   * the start of the hbase.pb.ClientService protobuf service: the abstract
//     ClientService base class and its nested Interface, which declares the
//     asynchronous form of each RPC as
//     (RpcController controller, ConcreteRequest request,
//      RpcCallback<ConcreteResponse> done).
// ---------------------------------------------------------------------------
DEFAULT_INSTANCE; } @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<MultiResponse> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<MultiResponse>() { public MultiResponse parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new MultiResponse(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<MultiResponse> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<MultiResponse> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } /** * Protobuf service {@code hbase.pb.ClientService} */ public static abstract class ClientService implements org.apache.hadoop.hbase.shaded.com.google.protobuf.Service { protected ClientService() {} public interface Interface { /** * <code>rpc Get(.hbase.pb.GetRequest) returns (.hbase.pb.GetResponse);</code> */ public abstract void get( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse> done); /** * <code>rpc Mutate(.hbase.pb.MutateRequest) returns (.hbase.pb.MutateResponse);</code> */ public abstract void mutate( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest request, 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse> done); /** * <code>rpc Scan(.hbase.pb.ScanRequest) returns (.hbase.pb.ScanResponse);</code> */ public abstract void scan( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse> done); /** * <code>rpc BulkLoadHFile(.hbase.pb.BulkLoadHFileRequest) returns (.hbase.pb.BulkLoadHFileResponse);</code> */ public abstract void bulkLoadHFile( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse> done); /** * <code>rpc PrepareBulkLoad(.hbase.pb.PrepareBulkLoadRequest) returns (.hbase.pb.PrepareBulkLoadResponse);</code> */ public abstract void prepareBulkLoad( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse> done); /** * <code>rpc CleanupBulkLoad(.hbase.pb.CleanupBulkLoadRequest) returns (.hbase.pb.CleanupBulkLoadResponse);</code> */ public abstract void cleanupBulkLoad( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest request, 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse> done); /** * <code>rpc ExecService(.hbase.pb.CoprocessorServiceRequest) returns (.hbase.pb.CoprocessorServiceResponse);</code> */ public abstract void execService( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done); /** * <code>rpc ExecRegionServerService(.hbase.pb.CoprocessorServiceRequest) returns (.hbase.pb.CoprocessorServiceResponse);</code> */ public abstract void execRegionServerService( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done); /** * <code>rpc Multi(.hbase.pb.MultiRequest) returns (.hbase.pb.MultiResponse);</code> */ public abstract void multi( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse> done); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Service newReflectiveService( final Interface impl) { return new ClientService() { @java.lang.Override public void get( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest request, 
// Generated code (protoc) — do not hand-edit.
// newReflectiveService(impl): wraps an Interface implementation in an
// anonymous ClientService subclass; each overridden async RPC simply forwards
// (controller, request, done) to the corresponding impl method, unchanged.
// The span then begins newReflectiveBlockingService(impl), which adapts a
// BlockingInterface implementation to the generic BlockingService contract.
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse> done) { impl.get(controller, request, done); } @java.lang.Override public void mutate( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse> done) { impl.mutate(controller, request, done); } @java.lang.Override public void scan( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse> done) { impl.scan(controller, request, done); } @java.lang.Override public void bulkLoadHFile( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse> done) { impl.bulkLoadHFile(controller, request, done); } @java.lang.Override public void prepareBulkLoad( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse> done) { impl.prepareBulkLoad(controller, request, done); } @java.lang.Override public void cleanupBulkLoad( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse> done) { impl.cleanupBulkLoad(controller, request, done); } @java.lang.Override public void execService( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done) { impl.execService(controller, request, done); } @java.lang.Override public void execRegionServerService( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done) { impl.execRegionServerService(controller, request, done); } @java.lang.Override public void multi( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse> done) { impl.multi(controller, request, done); } }; } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService newReflectiveBlockingService(final BlockingInterface impl) { return new org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService() { public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.ServiceDescriptor getDescriptorForType() { return getDescriptor(); } public final 
// Generated code (protoc) — do not hand-edit.
// BlockingService.callBlockingMethod: first validates that the supplied
// MethodDescriptor belongs to this service, then dispatches on
// method.getIndex() (0=Get, 1=Mutate, 2=Scan, 3=BulkLoadHFile,
// 4=PrepareBulkLoad, 5=CleanupBulkLoad, 6=ExecService,
// 7=ExecRegionServerService, 8=Multi) to the BlockingInterface impl, casting
// the generic Message request to the concrete request type. Indices 6 and 7
// intentionally share CoprocessorServiceRequest (both RPCs take the same
// message, per the <code>rpc</code> Javadoc on the interface methods).
// getRequestPrototype returns the default instance of the request type for
// the indexed method; the index/type pairings must stay exactly aligned with
// the service definition, which is why this table is generated.
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message callBlockingMethod( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor method, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.com.google.protobuf.Message request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { if (method.getService() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.callBlockingMethod() given method descriptor for " + "wrong service type."); } switch(method.getIndex()) { case 0: return impl.get(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest)request); case 1: return impl.mutate(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest)request); case 2: return impl.scan(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest)request); case 3: return impl.bulkLoadHFile(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest)request); case 4: return impl.prepareBulkLoad(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest)request); case 5: return impl.cleanupBulkLoad(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest)request); case 6: return impl.execService(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request); case 7: return impl.execRegionServerService(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request); case 8: return impl.multi(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest)request); default: throw new java.lang.AssertionError("Can't get here."); } } public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Message getRequestPrototype( 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor method) { if (method.getService() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.getRequestPrototype() given method " + "descriptor for wrong service type."); } switch(method.getIndex()) { case 0: return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest.getDefaultInstance(); case 1: return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest.getDefaultInstance(); case 2: return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance(); case 3: return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance(); case 4: return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.getDefaultInstance(); case 5: return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest.getDefaultInstance(); case 6: return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance(); case 7: return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance(); case 8: return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } } public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Message getResponsePrototype( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor method) { if (method.getService() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.getResponsePrototype() given method " + "descriptor for wrong service type."); } switch(method.getIndex()) { case 0: return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance(); case 1: return 
// Generated code (protoc) — do not hand-edit.
// This span finishes the blocking adapter's getResponsePrototype (the
// response-type mirror of getRequestPrototype, same index scheme, indices 6
// and 7 again sharing CoprocessorServiceResponse), then declares the abstract
// asynchronous RPC methods of ClientService itself — one per rpc in
// Client.proto — followed by the service-descriptor accessors
// (getDescriptor() resolves service index 0 of the file descriptor) and the
// opening of Service.callMethod, whose dispatch body continues on the next
// lines.
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance(); case 2: return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance(); case 3: return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance(); case 4: return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.getDefaultInstance(); case 5: return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.getDefaultInstance(); case 6: return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(); case 7: return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(); case 8: return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } } }; } /** * <code>rpc Get(.hbase.pb.GetRequest) returns (.hbase.pb.GetResponse);</code> */ public abstract void get( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse> done); /** * <code>rpc Mutate(.hbase.pb.MutateRequest) returns (.hbase.pb.MutateResponse);</code> */ public abstract void mutate( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse> done); /** * <code>rpc Scan(.hbase.pb.ScanRequest) returns (.hbase.pb.ScanResponse);</code> */ public abstract void scan( 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse> done); /** * <code>rpc BulkLoadHFile(.hbase.pb.BulkLoadHFileRequest) returns (.hbase.pb.BulkLoadHFileResponse);</code> */ public abstract void bulkLoadHFile( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse> done); /** * <code>rpc PrepareBulkLoad(.hbase.pb.PrepareBulkLoadRequest) returns (.hbase.pb.PrepareBulkLoadResponse);</code> */ public abstract void prepareBulkLoad( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse> done); /** * <code>rpc CleanupBulkLoad(.hbase.pb.CleanupBulkLoadRequest) returns (.hbase.pb.CleanupBulkLoadResponse);</code> */ public abstract void cleanupBulkLoad( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse> done); /** * <code>rpc ExecService(.hbase.pb.CoprocessorServiceRequest) returns (.hbase.pb.CoprocessorServiceResponse);</code> */ public abstract void execService( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done); /** * <code>rpc ExecRegionServerService(.hbase.pb.CoprocessorServiceRequest) returns (.hbase.pb.CoprocessorServiceResponse);</code> */ public abstract void execRegionServerService( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done); /** * <code>rpc Multi(.hbase.pb.MultiRequest) returns (.hbase.pb.MultiResponse);</code> */ public abstract void multi( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse> done); public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.ServiceDescriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.getDescriptor().getServices().get(0); } public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.ServiceDescriptor getDescriptorForType() { return getDescriptor(); } public final void callMethod( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor method, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.com.google.protobuf.Message request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback< org.apache.hadoop.hbase.shaded.com.google.protobuf.Message> done) { if (method.getService() 
// Generated code (protoc) — do not hand-edit.
// Continuation of Service.callMethod: after rejecting descriptors from other
// services, dispatch on method.getIndex() (same 0..8 ordering as the blocking
// adapter) to the abstract async methods, narrowing the generic
// RpcCallback<Message> to the concrete response type via
// RpcUtil.specializeCallback. The span then repeats
// getRequestPrototype/getResponsePrototype for the non-blocking Service
// contract — same index-to-default-instance tables as the blocking variant,
// including the shared CoprocessorService request/response at indices 6 and 7.
!= getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.callMethod() given method descriptor for wrong " + "service type."); } switch(method.getIndex()) { case 0: this.get(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse>specializeCallback( done)); return; case 1: this.mutate(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse>specializeCallback( done)); return; case 2: this.scan(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse>specializeCallback( done)); return; case 3: this.bulkLoadHFile(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse>specializeCallback( done)); return; case 4: this.prepareBulkLoad(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse>specializeCallback( done)); return; case 5: this.cleanupBulkLoad(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse>specializeCallback( done)); return; case 
6: this.execService(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse>specializeCallback( done)); return; case 7: this.execRegionServerService(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse>specializeCallback( done)); return; case 8: this.multi(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse>specializeCallback( done)); return; default: throw new java.lang.AssertionError("Can't get here."); } } public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Message getRequestPrototype( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor method) { if (method.getService() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.getRequestPrototype() given method " + "descriptor for wrong service type."); } switch(method.getIndex()) { case 0: return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest.getDefaultInstance(); case 1: return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest.getDefaultInstance(); case 2: return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance(); case 3: return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance(); case 4: return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.getDefaultInstance(); case 5: return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest.getDefaultInstance(); case 6: return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance(); case 7: return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance(); case 8: return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } } public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Message getResponsePrototype( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor method) { if (method.getService() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.getResponsePrototype() given method " + "descriptor for wrong service type."); } switch(method.getIndex()) { case 0: return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance(); case 1: return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance(); case 2: return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance(); case 3: return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance(); case 4: return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.getDefaultInstance(); case 5: return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.getDefaultInstance(); case 6: return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(); case 7: return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(); case 8: return 
// Generated code (protoc) — do not hand-edit.
// This span closes getResponsePrototype, then provides newStub(channel) and
// the Stub class: the asynchronous client-side stub. Each RPC issues
// channel.callMethod(getDescriptor().getMethods().get(i), controller,
// request, <ResponseType>.getDefaultInstance(), callback) where i is the
// RPC's index in the service (0=Get .. 8=Multi) and the caller's typed
// RpcCallback is adapted with RpcUtil.generalizeCallback to the concrete
// response class. The method-descriptor indices must match the dispatch
// tables above, which is why this pairing is generated rather than
// hand-maintained.
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } } public static Stub newStub( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel channel) { return new Stub(channel); } public static final class Stub extends org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService implements Interface { private Stub(org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel channel) { this.channel = channel; } private final org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel channel; public org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel getChannel() { return channel; } public void get( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse> done) { channel.callMethod( getDescriptor().getMethods().get(0), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance(), org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance())); } public void mutate( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse> done) { channel.callMethod( getDescriptor().getMethods().get(1), controller, request, 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance(), org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance())); } public void scan( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse> done) { channel.callMethod( getDescriptor().getMethods().get(2), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance(), org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance())); } public void bulkLoadHFile( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse> done) { channel.callMethod( getDescriptor().getMethods().get(3), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance(), org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance())); } public void 
prepareBulkLoad( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse> done) { channel.callMethod( getDescriptor().getMethods().get(4), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.getDefaultInstance(), org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.getDefaultInstance())); } public void cleanupBulkLoad( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse> done) { channel.callMethod( getDescriptor().getMethods().get(5), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.getDefaultInstance(), org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.getDefaultInstance())); } public void execService( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done) { channel.callMethod( getDescriptor().getMethods().get(6), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(), org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance())); } public void execRegionServerService( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done) { channel.callMethod( getDescriptor().getMethods().get(7), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(), org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance())); } public void multi( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse> done) { channel.callMethod( getDescriptor().getMethods().get(8), controller, request, 
// Generated code (protoc) — do not hand-edit.
// This span finishes Stub.multi, then provides newBlockingStub(channel), the
// BlockingInterface (the synchronous form of each RPC: returns the concrete
// response and throws ServiceException), and the start of BlockingStub, the
// synchronous client stub. Each BlockingStub method delegates to
// channel.callBlockingMethod(getDescriptor().getMethods().get(i), controller,
// request, <ResponseType>.getDefaultInstance()) and casts the returned
// Message to the concrete response type; indices again follow the 0=Get ..
// 8=Multi ordering. The final execService body is cut off at the end of this
// chunk and continues past it.
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse prepareBulkLoad( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse cleanupBulkLoad( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse execService( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse execRegionServerService( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse multi( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; } private static final class BlockingStub implements BlockingInterface { private BlockingStub(org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel channel) { this.channel = channel; } private final 
org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel channel; public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse get( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(0), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse mutate( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(1), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse scan( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(2), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse bulkLoadHFile( 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(3), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse prepareBulkLoad( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(4), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse cleanupBulkLoad( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(5), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse execService( 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(6), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse execRegionServerService( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(7), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance()); } public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse multi( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(8), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance()); } } // @@protoc_insertion_point(class_scope:hbase.pb.ClientService) } private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor 
    internal_static_hbase_pb_Authorizations_descriptor;
  // Cached message Descriptors and their FieldAccessorTables, one pair per message
  // declared in Client.proto.  All of them are populated exactly once by this
  // class's static initializer, after the serialized file descriptor is parsed.
  private static final
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hbase_pb_Authorizations_fieldAccessorTable;
  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_CellVisibility_descriptor;
  private static final
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hbase_pb_CellVisibility_fieldAccessorTable;
  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_Column_descriptor;
  private static final
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hbase_pb_Column_fieldAccessorTable;
  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_Get_descriptor;
  private static final
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hbase_pb_Get_fieldAccessorTable;
  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_Result_descriptor;
  private static final
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hbase_pb_Result_fieldAccessorTable;
  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_GetRequest_descriptor;
  private static final
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hbase_pb_GetRequest_fieldAccessorTable;
  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_GetResponse_descriptor;
  private static final
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hbase_pb_GetResponse_fieldAccessorTable;
  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_Condition_descriptor;
  private static final
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hbase_pb_Condition_fieldAccessorTable;
  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_MutationProto_descriptor;
  private static final
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hbase_pb_MutationProto_fieldAccessorTable;
  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_MutationProto_ColumnValue_descriptor;
  private static final
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hbase_pb_MutationProto_ColumnValue_fieldAccessorTable;
  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_descriptor;
  private static final
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_fieldAccessorTable;
  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_MutateRequest_descriptor;
  private static final
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hbase_pb_MutateRequest_fieldAccessorTable;
  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_MutateResponse_descriptor;
  private static final
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hbase_pb_MutateResponse_fieldAccessorTable;
  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_Scan_descriptor;
  private static final
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hbase_pb_Scan_fieldAccessorTable;
  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_ScanRequest_descriptor;
  private static final
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hbase_pb_ScanRequest_fieldAccessorTable;
  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_ScanResponse_descriptor;
  private static final
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hbase_pb_ScanResponse_fieldAccessorTable;
  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_BulkLoadHFileRequest_descriptor;
  private static final
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hbase_pb_BulkLoadHFileRequest_fieldAccessorTable;
  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_descriptor;
  private static final
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable;
  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_BulkLoadHFileResponse_descriptor;
  private static final
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hbase_pb_BulkLoadHFileResponse_fieldAccessorTable;
  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_DelegationToken_descriptor;
  private static final
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hbase_pb_DelegationToken_fieldAccessorTable;
  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_PrepareBulkLoadRequest_descriptor;
  private static final
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hbase_pb_PrepareBulkLoadRequest_fieldAccessorTable;
  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_PrepareBulkLoadResponse_descriptor;
  private static final
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hbase_pb_PrepareBulkLoadResponse_fieldAccessorTable;
  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_CleanupBulkLoadRequest_descriptor;
  private static final
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hbase_pb_CleanupBulkLoadRequest_fieldAccessorTable;
  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_CleanupBulkLoadResponse_descriptor;
  private static final
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hbase_pb_CleanupBulkLoadResponse_fieldAccessorTable;
  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_CoprocessorServiceCall_descriptor;
  private static final
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hbase_pb_CoprocessorServiceCall_fieldAccessorTable;
  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_CoprocessorServiceResult_descriptor;
  private static final
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hbase_pb_CoprocessorServiceResult_fieldAccessorTable;
  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_CoprocessorServiceRequest_descriptor;
  private static final
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hbase_pb_CoprocessorServiceRequest_fieldAccessorTable;
  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_CoprocessorServiceResponse_descriptor;
  private static final
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hbase_pb_CoprocessorServiceResponse_fieldAccessorTable;
  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_Action_descriptor;
  private static final
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hbase_pb_Action_fieldAccessorTable;
  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_RegionAction_descriptor;
  private static final
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hbase_pb_RegionAction_fieldAccessorTable;
  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_RegionLoadStats_descriptor;
  private static final
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hbase_pb_RegionLoadStats_fieldAccessorTable;
  private static final
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_MultiRegionLoadStats_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_MultiRegionLoadStats_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ResultOrException_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ResultOrException_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RegionActionResult_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_RegionActionResult_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_MultiRequest_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_MultiRequest_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_MultiResponse_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_MultiResponse_fieldAccessorTable; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n\014Client.proto\022\010hbase.pb\032\013HBase.proto\032\014F" + "ilter.proto\032\nCell.proto\032\020Comparator.prot" + 
"o\032\017MapReduce.proto\"\037\n\016Authorizations\022\r\n\005" + "label\030\001 \003(\t\"$\n\016CellVisibility\022\022\n\nexpress" + "ion\030\001 \002(\t\"+\n\006Column\022\016\n\006family\030\001 \002(\014\022\021\n\tq" + "ualifier\030\002 \003(\014\"\276\003\n\003Get\022\013\n\003row\030\001 \002(\014\022 \n\006c" + "olumn\030\002 \003(\0132\020.hbase.pb.Column\022*\n\tattribu" + "te\030\003 \003(\0132\027.hbase.pb.NameBytesPair\022 \n\006fil" + "ter\030\004 \001(\0132\020.hbase.pb.Filter\022\'\n\ntime_rang" + "e\030\005 \001(\0132\023.hbase.pb.TimeRange\022\027\n\014max_vers", "ions\030\006 \001(\r:\0011\022\032\n\014cache_blocks\030\007 \001(\010:\004tru" + "e\022\023\n\013store_limit\030\010 \001(\r\022\024\n\014store_offset\030\t" + " \001(\r\022\035\n\016existence_only\030\n \001(\010:\005false\0222\n\013c" + "onsistency\030\014 \001(\0162\025.hbase.pb.Consistency:" + "\006STRONG\0226\n\rcf_time_range\030\r \003(\0132\037.hbase.p" + "b.ColumnFamilyTimeRange\022&\n\036load_column_f" + "amilies_on_demand\030\016 \001(\010\"\203\001\n\006Result\022\034\n\004ce" + "ll\030\001 \003(\0132\016.hbase.pb.Cell\022\035\n\025associated_c" + "ell_count\030\002 \001(\005\022\016\n\006exists\030\003 \001(\010\022\024\n\005stale" + "\030\004 \001(\010:\005false\022\026\n\007partial\030\005 \001(\010:\005false\"S\n", "\nGetRequest\022)\n\006region\030\001 \002(\0132\031.hbase.pb.R" + "egionSpecifier\022\032\n\003get\030\002 \002(\0132\r.hbase.pb.G" + "et\"/\n\013GetResponse\022 \n\006result\030\001 \001(\0132\020.hbas" + "e.pb.Result\"\222\001\n\tCondition\022\013\n\003row\030\001 \002(\014\022\016" + "\n\006family\030\002 \002(\014\022\021\n\tqualifier\030\003 \002(\014\022+\n\014com" + "pare_type\030\004 \002(\0162\025.hbase.pb.CompareType\022(" + "\n\ncomparator\030\005 \002(\0132\024.hbase.pb.Comparator" + "\"\364\006\n\rMutationProto\022\013\n\003row\030\001 \001(\014\0229\n\013mutat" + "e_type\030\002 \001(\0162$.hbase.pb.MutationProto.Mu" + 
"tationType\0229\n\014column_value\030\003 \003(\0132#.hbase", ".pb.MutationProto.ColumnValue\022\021\n\ttimesta" + "mp\030\004 \001(\004\022*\n\tattribute\030\005 \003(\0132\027.hbase.pb.N" + "ameBytesPair\022C\n\ndurability\030\006 \001(\0162\".hbase" + ".pb.MutationProto.Durability:\013USE_DEFAUL" + "T\022\'\n\ntime_range\030\007 \001(\0132\023.hbase.pb.TimeRan" + "ge\022\035\n\025associated_cell_count\030\010 \001(\005\022\r\n\005non" + "ce\030\t \001(\004\032\371\001\n\013ColumnValue\022\016\n\006family\030\001 \002(\014" + "\022K\n\017qualifier_value\030\002 \003(\01322.hbase.pb.Mut" + "ationProto.ColumnValue.QualifierValue\032\214\001" + "\n\016QualifierValue\022\021\n\tqualifier\030\001 \001(\014\022\r\n\005v", "alue\030\002 \001(\014\022\021\n\ttimestamp\030\003 \001(\004\0227\n\013delete_" + "type\030\004 \001(\0162\".hbase.pb.MutationProto.Dele" + "teType\022\014\n\004tags\030\005 \001(\014\"W\n\nDurability\022\017\n\013US" + "E_DEFAULT\020\000\022\014\n\010SKIP_WAL\020\001\022\r\n\tASYNC_WAL\020\002" + "\022\014\n\010SYNC_WAL\020\003\022\r\n\tFSYNC_WAL\020\004\">\n\014Mutatio" + "nType\022\n\n\006APPEND\020\000\022\r\n\tINCREMENT\020\001\022\007\n\003PUT\020" + "\002\022\n\n\006DELETE\020\003\"p\n\nDeleteType\022\026\n\022DELETE_ON" + "E_VERSION\020\000\022\034\n\030DELETE_MULTIPLE_VERSIONS\020" + "\001\022\021\n\rDELETE_FAMILY\020\002\022\031\n\025DELETE_FAMILY_VE" + "RSION\020\003\"\242\001\n\rMutateRequest\022)\n\006region\030\001 \002(", "\0132\031.hbase.pb.RegionSpecifier\022)\n\010mutation" + "\030\002 \002(\0132\027.hbase.pb.MutationProto\022&\n\tcondi" + "tion\030\003 \001(\0132\023.hbase.pb.Condition\022\023\n\013nonce" + "_group\030\004 \001(\004\"E\n\016MutateResponse\022 \n\006result" + "\030\001 \001(\0132\020.hbase.pb.Result\022\021\n\tprocessed\030\002 " + "\001(\010\"\203\006\n\004Scan\022 \n\006column\030\001 \003(\0132\020.hbase.pb." 
+ "Column\022*\n\tattribute\030\002 \003(\0132\027.hbase.pb.Nam" + "eBytesPair\022\021\n\tstart_row\030\003 \001(\014\022\020\n\010stop_ro" + "w\030\004 \001(\014\022 \n\006filter\030\005 \001(\0132\020.hbase.pb.Filte" + "r\022\'\n\ntime_range\030\006 \001(\0132\023.hbase.pb.TimeRan", "ge\022\027\n\014max_versions\030\007 \001(\r:\0011\022\032\n\014cache_blo" + "cks\030\010 \001(\010:\004true\022\022\n\nbatch_size\030\t \001(\r\022\027\n\017m" + "ax_result_size\030\n \001(\004\022\023\n\013store_limit\030\013 \001(" + "\r\022\024\n\014store_offset\030\014 \001(\r\022&\n\036load_column_f" + "amilies_on_demand\030\r \001(\010\022\021\n\005small\030\016 \001(\010B\002" + "\030\001\022\027\n\010reversed\030\017 \001(\010:\005false\0222\n\013consisten" + "cy\030\020 \001(\0162\025.hbase.pb.Consistency:\006STRONG\022" + "\017\n\007caching\030\021 \001(\r\022\035\n\025allow_partial_result" + "s\030\022 \001(\010\0226\n\rcf_time_range\030\023 \003(\0132\037.hbase.p" + "b.ColumnFamilyTimeRange\022\032\n\017mvcc_read_poi", "nt\030\024 \001(\004:\0010\022\037\n\021include_start_row\030\025 \001(\010:\004" + "true\022\037\n\020include_stop_row\030\026 \001(\010:\005false\0222\n" + "\010readType\030\027 \001(\0162\027.hbase.pb.Scan.ReadType" + ":\007DEFAULT\".\n\010ReadType\022\013\n\007DEFAULT\020\000\022\n\n\006ST" + "REAM\020\001\022\t\n\005PREAD\020\002\"\300\002\n\013ScanRequest\022)\n\006reg" + "ion\030\001 \001(\0132\031.hbase.pb.RegionSpecifier\022\034\n\004" + "scan\030\002 \001(\0132\016.hbase.pb.Scan\022\022\n\nscanner_id" + "\030\003 \001(\004\022\026\n\016number_of_rows\030\004 \001(\r\022\025\n\rclose_" + "scanner\030\005 \001(\010\022\025\n\rnext_call_seq\030\006 \001(\004\022\037\n\027" + "client_handles_partials\030\007 \001(\010\022!\n\031client_", "handles_heartbeats\030\010 \001(\010\022\032\n\022track_scan_m" + "etrics\030\t \001(\010\022\024\n\005renew\030\n \001(\010:\005false\022\030\n\rli" + "mit_of_rows\030\013 
\001(\r:\0010\"\266\002\n\014ScanResponse\022\030\n" + "\020cells_per_result\030\001 \003(\r\022\022\n\nscanner_id\030\002 " + "\001(\004\022\024\n\014more_results\030\003 \001(\010\022\013\n\003ttl\030\004 \001(\r\022!" + "\n\007results\030\005 \003(\0132\020.hbase.pb.Result\022\r\n\005sta" + "le\030\006 \001(\010\022\037\n\027partial_flag_per_result\030\007 \003(" + "\010\022\036\n\026more_results_in_region\030\010 \001(\010\022\031\n\021hea" + "rtbeat_message\030\t \001(\010\022+\n\014scan_metrics\030\n \001" + "(\0132\025.hbase.pb.ScanMetrics\022\032\n\017mvcc_read_p", "oint\030\013 \001(\004:\0010\"\240\002\n\024BulkLoadHFileRequest\022)" + "\n\006region\030\001 \002(\0132\031.hbase.pb.RegionSpecifie" + "r\022>\n\013family_path\030\002 \003(\0132).hbase.pb.BulkLo" + "adHFileRequest.FamilyPath\022\026\n\016assign_seq_" + "num\030\003 \001(\010\022+\n\010fs_token\030\004 \001(\0132\031.hbase.pb.D" + "elegationToken\022\022\n\nbulk_token\030\005 \001(\t\022\030\n\tco" + "py_file\030\006 \001(\010:\005false\032*\n\nFamilyPath\022\016\n\006fa" + "mily\030\001 \002(\014\022\014\n\004path\030\002 \002(\t\"\'\n\025BulkLoadHFil" + "eResponse\022\016\n\006loaded\030\001 \002(\010\"V\n\017DelegationT" + "oken\022\022\n\nidentifier\030\001 \001(\014\022\020\n\010password\030\002 \001", "(\014\022\014\n\004kind\030\003 \001(\t\022\017\n\007service\030\004 \001(\t\"l\n\026Pre" + "pareBulkLoadRequest\022\'\n\ntable_name\030\001 \002(\0132" + "\023.hbase.pb.TableName\022)\n\006region\030\002 \001(\0132\031.h" + "base.pb.RegionSpecifier\"-\n\027PrepareBulkLo" + "adResponse\022\022\n\nbulk_token\030\001 \002(\t\"W\n\026Cleanu" + "pBulkLoadRequest\022\022\n\nbulk_token\030\001 \002(\t\022)\n\006" + "region\030\002 \001(\0132\031.hbase.pb.RegionSpecifier\"" + "\031\n\027CleanupBulkLoadResponse\"a\n\026Coprocesso" + "rServiceCall\022\013\n\003row\030\001 \002(\014\022\024\n\014service_nam" + "e\030\002 \002(\t\022\023\n\013method_name\030\003 \002(\t\022\017\n\007request\030", 
"\004 \002(\014\"B\n\030CoprocessorServiceResult\022&\n\005val" + "ue\030\001 \001(\0132\027.hbase.pb.NameBytesPair\"v\n\031Cop" + "rocessorServiceRequest\022)\n\006region\030\001 \002(\0132\031" + ".hbase.pb.RegionSpecifier\022.\n\004call\030\002 \002(\0132" + " .hbase.pb.CoprocessorServiceCall\"o\n\032Cop" + "rocessorServiceResponse\022)\n\006region\030\001 \002(\0132" + "\031.hbase.pb.RegionSpecifier\022&\n\005value\030\002 \002(" + "\0132\027.hbase.pb.NameBytesPair\"\226\001\n\006Action\022\r\n" + "\005index\030\001 \001(\r\022)\n\010mutation\030\002 \001(\0132\027.hbase.p" + "b.MutationProto\022\032\n\003get\030\003 \001(\0132\r.hbase.pb.", "Get\0226\n\014service_call\030\004 \001(\0132 .hbase.pb.Cop" + "rocessorServiceCall\"k\n\014RegionAction\022)\n\006r" + "egion\030\001 \002(\0132\031.hbase.pb.RegionSpecifier\022\016" + "\n\006atomic\030\002 \001(\010\022 \n\006action\030\003 \003(\0132\020.hbase.p" + "b.Action\"c\n\017RegionLoadStats\022\027\n\014memstoreL" + "oad\030\001 \001(\005:\0010\022\030\n\rheapOccupancy\030\002 \001(\005:\0010\022\035" + "\n\022compactionPressure\030\003 \001(\005:\0010\"j\n\024MultiRe" + "gionLoadStats\022)\n\006region\030\001 \003(\0132\031.hbase.pb" + ".RegionSpecifier\022\'\n\004stat\030\002 \003(\0132\031.hbase.p" + "b.RegionLoadStats\"\336\001\n\021ResultOrException\022", "\r\n\005index\030\001 \001(\r\022 \n\006result\030\002 \001(\0132\020.hbase.p" + "b.Result\022*\n\texception\030\003 \001(\0132\027.hbase.pb.N" + "ameBytesPair\022:\n\016service_result\030\004 \001(\0132\".h" + "base.pb.CoprocessorServiceResult\0220\n\tload" + "Stats\030\005 \001(\0132\031.hbase.pb.RegionLoadStatsB\002" + "\030\001\"x\n\022RegionActionResult\0226\n\021resultOrExce" + "ption\030\001 \003(\0132\033.hbase.pb.ResultOrException" + "\022*\n\texception\030\002 \001(\0132\027.hbase.pb.NameBytes" + "Pair\"x\n\014MultiRequest\022,\n\014regionAction\030\001 \003" + "(\0132\026.hbase.pb.RegionAction\022\022\n\nnonceGroup", "\030\002 
\001(\004\022&\n\tcondition\030\003 \001(\0132\023.hbase.pb.Con" + "dition\"\226\001\n\rMultiResponse\0228\n\022regionAction" + "Result\030\001 \003(\0132\034.hbase.pb.RegionActionResu" + "lt\022\021\n\tprocessed\030\002 \001(\010\0228\n\020regionStatistic" + "s\030\003 \001(\0132\036.hbase.pb.MultiRegionLoadStats*" + "\'\n\013Consistency\022\n\n\006STRONG\020\000\022\014\n\010TIMELINE\020\001" + "2\263\005\n\rClientService\0222\n\003Get\022\024.hbase.pb.Get" + "Request\032\025.hbase.pb.GetResponse\022;\n\006Mutate" + "\022\027.hbase.pb.MutateRequest\032\030.hbase.pb.Mut" + "ateResponse\0225\n\004Scan\022\025.hbase.pb.ScanReque", "st\032\026.hbase.pb.ScanResponse\022P\n\rBulkLoadHF" + "ile\022\036.hbase.pb.BulkLoadHFileRequest\032\037.hb" + "ase.pb.BulkLoadHFileResponse\022V\n\017PrepareB" + "ulkLoad\022 .hbase.pb.PrepareBulkLoadReques" + "t\032!.hbase.pb.PrepareBulkLoadResponse\022V\n\017" + "CleanupBulkLoad\022 .hbase.pb.CleanupBulkLo" + "adRequest\032!.hbase.pb.CleanupBulkLoadResp" + "onse\022X\n\013ExecService\022#.hbase.pb.Coprocess" + "orServiceRequest\032$.hbase.pb.CoprocessorS" + "erviceResponse\022d\n\027ExecRegionServerServic", "e\022#.hbase.pb.CoprocessorServiceRequest\032$" + ".hbase.pb.CoprocessorServiceResponse\0228\n\005" + "Multi\022\026.hbase.pb.MultiRequest\032\027.hbase.pb" + ".MultiResponseBI\n1org.apache.hadoop.hbas" + "e.shaded.protobuf.generatedB\014ClientProto" + "sH\001\210\001\001\240\001\001" }; org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { public org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry assignDescriptors( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor root) { descriptor = root; return null; } }; org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor[] { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor(), org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.getDescriptor(), org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.getDescriptor(), org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.getDescriptor(), org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.getDescriptor(), }, assigner); internal_static_hbase_pb_Authorizations_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_hbase_pb_Authorizations_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_Authorizations_descriptor, new java.lang.String[] { "Label", }); internal_static_hbase_pb_CellVisibility_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_hbase_pb_CellVisibility_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_CellVisibility_descriptor, new java.lang.String[] { "Expression", }); internal_static_hbase_pb_Column_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_hbase_pb_Column_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_Column_descriptor, new java.lang.String[] { "Family", "Qualifier", }); internal_static_hbase_pb_Get_descriptor = getDescriptor().getMessageTypes().get(3); 
internal_static_hbase_pb_Get_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_Get_descriptor, new java.lang.String[] { "Row", "Column", "Attribute", "Filter", "TimeRange", "MaxVersions", "CacheBlocks", "StoreLimit", "StoreOffset", "ExistenceOnly", "Consistency", "CfTimeRange", "LoadColumnFamiliesOnDemand", }); internal_static_hbase_pb_Result_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_hbase_pb_Result_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_Result_descriptor, new java.lang.String[] { "Cell", "AssociatedCellCount", "Exists", "Stale", "Partial", }); internal_static_hbase_pb_GetRequest_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_hbase_pb_GetRequest_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_GetRequest_descriptor, new java.lang.String[] { "Region", "Get", }); internal_static_hbase_pb_GetResponse_descriptor = getDescriptor().getMessageTypes().get(6); internal_static_hbase_pb_GetResponse_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_GetResponse_descriptor, new java.lang.String[] { "Result", }); internal_static_hbase_pb_Condition_descriptor = getDescriptor().getMessageTypes().get(7); internal_static_hbase_pb_Condition_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_Condition_descriptor, new java.lang.String[] { "Row", "Family", "Qualifier", "CompareType", "Comparator", }); internal_static_hbase_pb_MutationProto_descriptor = getDescriptor().getMessageTypes().get(8); internal_static_hbase_pb_MutationProto_fieldAccessorTable = new 
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_MutationProto_descriptor, new java.lang.String[] { "Row", "MutateType", "ColumnValue", "Timestamp", "Attribute", "Durability", "TimeRange", "AssociatedCellCount", "Nonce", }); internal_static_hbase_pb_MutationProto_ColumnValue_descriptor = internal_static_hbase_pb_MutationProto_descriptor.getNestedTypes().get(0); internal_static_hbase_pb_MutationProto_ColumnValue_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_MutationProto_ColumnValue_descriptor, new java.lang.String[] { "Family", "QualifierValue", }); internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_descriptor = internal_static_hbase_pb_MutationProto_ColumnValue_descriptor.getNestedTypes().get(0); internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_MutationProto_ColumnValue_QualifierValue_descriptor, new java.lang.String[] { "Qualifier", "Value", "Timestamp", "DeleteType", "Tags", }); internal_static_hbase_pb_MutateRequest_descriptor = getDescriptor().getMessageTypes().get(9); internal_static_hbase_pb_MutateRequest_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_MutateRequest_descriptor, new java.lang.String[] { "Region", "Mutation", "Condition", "NonceGroup", }); internal_static_hbase_pb_MutateResponse_descriptor = getDescriptor().getMessageTypes().get(10); internal_static_hbase_pb_MutateResponse_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_MutateResponse_descriptor, new java.lang.String[] { "Result", "Processed", }); internal_static_hbase_pb_Scan_descriptor 
= getDescriptor().getMessageTypes().get(11); internal_static_hbase_pb_Scan_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_Scan_descriptor, new java.lang.String[] { "Column", "Attribute", "StartRow", "StopRow", "Filter", "TimeRange", "MaxVersions", "CacheBlocks", "BatchSize", "MaxResultSize", "StoreLimit", "StoreOffset", "LoadColumnFamiliesOnDemand", "Small", "Reversed", "Consistency", "Caching", "AllowPartialResults", "CfTimeRange", "MvccReadPoint", "IncludeStartRow", "IncludeStopRow", "ReadType", }); internal_static_hbase_pb_ScanRequest_descriptor = getDescriptor().getMessageTypes().get(12); internal_static_hbase_pb_ScanRequest_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_ScanRequest_descriptor, new java.lang.String[] { "Region", "Scan", "ScannerId", "NumberOfRows", "CloseScanner", "NextCallSeq", "ClientHandlesPartials", "ClientHandlesHeartbeats", "TrackScanMetrics", "Renew", "LimitOfRows", }); internal_static_hbase_pb_ScanResponse_descriptor = getDescriptor().getMessageTypes().get(13); internal_static_hbase_pb_ScanResponse_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_ScanResponse_descriptor, new java.lang.String[] { "CellsPerResult", "ScannerId", "MoreResults", "Ttl", "Results", "Stale", "PartialFlagPerResult", "MoreResultsInRegion", "HeartbeatMessage", "ScanMetrics", "MvccReadPoint", }); internal_static_hbase_pb_BulkLoadHFileRequest_descriptor = getDescriptor().getMessageTypes().get(14); internal_static_hbase_pb_BulkLoadHFileRequest_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_BulkLoadHFileRequest_descriptor, new java.lang.String[] { "Region", "FamilyPath", "AssignSeqNum", "FsToken", 
"BulkToken", "CopyFile", }); internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_descriptor = internal_static_hbase_pb_BulkLoadHFileRequest_descriptor.getNestedTypes().get(0); internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_descriptor, new java.lang.String[] { "Family", "Path", }); internal_static_hbase_pb_BulkLoadHFileResponse_descriptor = getDescriptor().getMessageTypes().get(15); internal_static_hbase_pb_BulkLoadHFileResponse_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_BulkLoadHFileResponse_descriptor, new java.lang.String[] { "Loaded", }); internal_static_hbase_pb_DelegationToken_descriptor = getDescriptor().getMessageTypes().get(16); internal_static_hbase_pb_DelegationToken_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_DelegationToken_descriptor, new java.lang.String[] { "Identifier", "Password", "Kind", "Service", }); internal_static_hbase_pb_PrepareBulkLoadRequest_descriptor = getDescriptor().getMessageTypes().get(17); internal_static_hbase_pb_PrepareBulkLoadRequest_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_PrepareBulkLoadRequest_descriptor, new java.lang.String[] { "TableName", "Region", }); internal_static_hbase_pb_PrepareBulkLoadResponse_descriptor = getDescriptor().getMessageTypes().get(18); internal_static_hbase_pb_PrepareBulkLoadResponse_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_PrepareBulkLoadResponse_descriptor, new java.lang.String[] { "BulkToken", }); 
internal_static_hbase_pb_CleanupBulkLoadRequest_descriptor = getDescriptor().getMessageTypes().get(19); internal_static_hbase_pb_CleanupBulkLoadRequest_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_CleanupBulkLoadRequest_descriptor, new java.lang.String[] { "BulkToken", "Region", }); internal_static_hbase_pb_CleanupBulkLoadResponse_descriptor = getDescriptor().getMessageTypes().get(20); internal_static_hbase_pb_CleanupBulkLoadResponse_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_CleanupBulkLoadResponse_descriptor, new java.lang.String[] { }); internal_static_hbase_pb_CoprocessorServiceCall_descriptor = getDescriptor().getMessageTypes().get(21); internal_static_hbase_pb_CoprocessorServiceCall_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_CoprocessorServiceCall_descriptor, new java.lang.String[] { "Row", "ServiceName", "MethodName", "Request", }); internal_static_hbase_pb_CoprocessorServiceResult_descriptor = getDescriptor().getMessageTypes().get(22); internal_static_hbase_pb_CoprocessorServiceResult_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_CoprocessorServiceResult_descriptor, new java.lang.String[] { "Value", }); internal_static_hbase_pb_CoprocessorServiceRequest_descriptor = getDescriptor().getMessageTypes().get(23); internal_static_hbase_pb_CoprocessorServiceRequest_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_CoprocessorServiceRequest_descriptor, new java.lang.String[] { "Region", "Call", }); internal_static_hbase_pb_CoprocessorServiceResponse_descriptor = 
getDescriptor().getMessageTypes().get(24); internal_static_hbase_pb_CoprocessorServiceResponse_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_CoprocessorServiceResponse_descriptor, new java.lang.String[] { "Region", "Value", }); internal_static_hbase_pb_Action_descriptor = getDescriptor().getMessageTypes().get(25); internal_static_hbase_pb_Action_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_Action_descriptor, new java.lang.String[] { "Index", "Mutation", "Get", "ServiceCall", }); internal_static_hbase_pb_RegionAction_descriptor = getDescriptor().getMessageTypes().get(26); internal_static_hbase_pb_RegionAction_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_RegionAction_descriptor, new java.lang.String[] { "Region", "Atomic", "Action", }); internal_static_hbase_pb_RegionLoadStats_descriptor = getDescriptor().getMessageTypes().get(27); internal_static_hbase_pb_RegionLoadStats_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_RegionLoadStats_descriptor, new java.lang.String[] { "MemstoreLoad", "HeapOccupancy", "CompactionPressure", }); internal_static_hbase_pb_MultiRegionLoadStats_descriptor = getDescriptor().getMessageTypes().get(28); internal_static_hbase_pb_MultiRegionLoadStats_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_MultiRegionLoadStats_descriptor, new java.lang.String[] { "Region", "Stat", }); internal_static_hbase_pb_ResultOrException_descriptor = getDescriptor().getMessageTypes().get(29); internal_static_hbase_pb_ResultOrException_fieldAccessorTable = new 
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_ResultOrException_descriptor, new java.lang.String[] { "Index", "Result", "Exception", "ServiceResult", "LoadStats", }); internal_static_hbase_pb_RegionActionResult_descriptor = getDescriptor().getMessageTypes().get(30); internal_static_hbase_pb_RegionActionResult_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_RegionActionResult_descriptor, new java.lang.String[] { "ResultOrException", "Exception", }); internal_static_hbase_pb_MultiRequest_descriptor = getDescriptor().getMessageTypes().get(31); internal_static_hbase_pb_MultiRequest_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_MultiRequest_descriptor, new java.lang.String[] { "RegionAction", "NonceGroup", "Condition", }); internal_static_hbase_pb_MultiResponse_descriptor = getDescriptor().getMessageTypes().get(32); internal_static_hbase_pb_MultiResponse_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_MultiResponse_descriptor, new java.lang.String[] { "RegionActionResult", "Processed", "RegionStatistics", }); org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor(); org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.getDescriptor(); org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.getDescriptor(); org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.getDescriptor(); org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) }